summaryrefslogtreecommitdiffstats
path: root/Monitoring/src
diff options
context:
space:
mode:
authorpikusa <pikusa@man.poznan.pl>2013-04-03 13:18:17 (GMT)
committer pikusa <pikusa@man.poznan.pl>2013-04-03 13:18:17 (GMT)
commit2f2a3a129c91de540e66c3bfbe30b0df1942cd4b (patch)
tree2d313cdf0068af368d4de6067d676be16f6a6464 /Monitoring/src
parentff8aa232b071a9b54dff833714a870fd0aec0b30 (diff)
downloadnovi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.zip
novi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.tar.gz
novi-public-2f2a3a129c91de540e66c3bfbe30b0df1942cd4b.tar.bz2
project commit and dir tree change
Diffstat (limited to 'Monitoring/src')
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/Activator.java128
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/MonDiscovery.java22
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/MonDiscoveryImpl.java104
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/MonSrv.java86
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/MonSrvFactory.java12
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/Wiring.java14
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/credential/Credential.java5
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/credential/UsernamePassword.java17
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/credential/UsernameRSAKey.java19
-rw-r--r--Monitoring/src/main/java/eu/novi/monitoring/util/MonitoringQuery.java11
-rw-r--r--Monitoring/src/main/python/Credential/SshKeygen.py99
-rw-r--r--Monitoring/src/main/python/Credential/SshKeygen.py.old100
-rw-r--r--Monitoring/src/main/python/Credential/__init__$py.classbin0 -> 2070 bytes
-rw-r--r--Monitoring/src/main/python/Credential/__init__.py0
-rw-r--r--Monitoring/src/main/python/Credential/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Credential/credentialtypes$py.classbin0 -> 5584 bytes
-rw-r--r--Monitoring/src/main/python/Credential/credentialtypes.py52
-rw-r--r--Monitoring/src/main/python/Credential/credentialtypes.py.old52
-rw-r--r--Monitoring/src/main/python/Credential/test.py46
-rw-r--r--Monitoring/src/main/python/Credential/test.py.old46
-rw-r--r--Monitoring/src/main/python/DataProcessing/Aggregator$py.classbin0 -> 24132 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/Aggregator.py265
-rw-r--r--Monitoring/src/main/python/DataProcessing/Aggregator.py.old251
-rw-r--r--Monitoring/src/main/python/DataProcessing/AggregatorManager$py.classbin0 -> 7416 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/AggregatorManager.py40
-rw-r--r--Monitoring/src/main/python/DataProcessing/AggregatorManager.py.old39
-rw-r--r--Monitoring/src/main/python/DataProcessing/Data$py.classbin0 -> 24886 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/Data.py296
-rw-r--r--Monitoring/src/main/python/DataProcessing/Data.py.old290
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataError$py.classbin0 -> 4510 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataError.py29
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataError.py.old29
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataFormatter$py.classbin0 -> 13851 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataFormatter.py145
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataFormatter.py.old140
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataHeader$py.classbin0 -> 17821 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataHeader.py202
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataHeader.py.old200
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataHeaderCell$py.classbin0 -> 14508 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataHeaderCell.py155
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataHeaderCell.py.old155
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataIndex.py39
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataIndex.py.old39
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataReader$py.classbin0 -> 16621 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataReader.py161
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataReader.py.old161
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataSource$py.classbin0 -> 10234 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataSource.py96
-rw-r--r--Monitoring/src/main/python/DataProcessing/DataSource.py.old91
-rw-r--r--Monitoring/src/main/python/DataProcessing/Dimension$py.classbin0 -> 26585 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/Dimension.py306
-rw-r--r--Monitoring/src/main/python/DataProcessing/Dimension.py.old295
-rw-r--r--Monitoring/src/main/python/DataProcessing/MeasurementLevel$py.classbin0 -> 5487 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/MeasurementLevel.py46
-rw-r--r--Monitoring/src/main/python/DataProcessing/MeasurementLevel.py.old46
-rw-r--r--Monitoring/src/main/python/DataProcessing/Parameter$py.classbin0 -> 22697 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/Parameter.py283
-rw-r--r--Monitoring/src/main/python/DataProcessing/Parameter.py.old280
-rw-r--r--Monitoring/src/main/python/DataProcessing/Prefix$py.classbin0 -> 11881 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/Prefix.py119
-rw-r--r--Monitoring/src/main/python/DataProcessing/Prefix.py.old113
-rw-r--r--Monitoring/src/main/python/DataProcessing/Sampler$py.classbin0 -> 17561 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/Sampler.py195
-rw-r--r--Monitoring/src/main/python/DataProcessing/Sampler.py.old191
-rw-r--r--Monitoring/src/main/python/DataProcessing/Unit$py.classbin0 -> 25970 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/Unit.py298
-rw-r--r--Monitoring/src/main/python/DataProcessing/Unit.py.old294
-rw-r--r--Monitoring/src/main/python/DataProcessing/__init__$py.classbin0 -> 2082 bytes
-rw-r--r--Monitoring/src/main/python/DataProcessing/__init__.py0
-rw-r--r--Monitoring/src/main/python/DataProcessing/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/DataProcessing/test.py372
-rw-r--r--Monitoring/src/main/python/DataProcessing/test.py.old368
-rw-r--r--Monitoring/src/main/python/Database/ConnectionPool.py17
-rw-r--r--Monitoring/src/main/python/Database/ConnectionPool.py.old17
-rw-r--r--Monitoring/src/main/python/Database/DatabaseAccess.py20
-rw-r--r--Monitoring/src/main/python/Database/DatabaseAccess.py.old20
-rw-r--r--Monitoring/src/main/python/Database/StorageFIFO.py17
-rw-r--r--Monitoring/src/main/python/Database/StorageFIFO.py.old17
-rw-r--r--Monitoring/src/main/python/Database/__init__.py0
-rw-r--r--Monitoring/src/main/python/Database/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Database/test.py26
-rw-r--r--Monitoring/src/main/python/Database/test.py.old26
-rw-r--r--Monitoring/src/main/python/Driver/Driver$py.classbin0 -> 3412 bytes
-rw-r--r--Monitoring/src/main/python/Driver/Driver.py17
-rw-r--r--Monitoring/src/main/python/Driver/Driver.py.old17
-rw-r--r--Monitoring/src/main/python/Driver/REST.py94
-rw-r--r--Monitoring/src/main/python/Driver/SOAPClient$py.classbin0 -> 3755 bytes
-rw-r--r--Monitoring/src/main/python/Driver/SOAPClient.py18
-rw-r--r--Monitoring/src/main/python/Driver/SOAPClient.py.old18
-rw-r--r--Monitoring/src/main/python/Driver/SshExec$py.classbin0 -> 12676 bytes
-rw-r--r--Monitoring/src/main/python/Driver/SshExec.py187
-rw-r--r--Monitoring/src/main/python/Driver/SshExec.py.old92
-rw-r--r--Monitoring/src/main/python/Driver/SshTunnel.py113
-rw-r--r--Monitoring/src/main/python/Driver/SshTunnel.py.old113
-rw-r--r--Monitoring/src/main/python/Driver/__init__$py.classbin0 -> 2058 bytes
-rw-r--r--Monitoring/src/main/python/Driver/__init__.py0
-rw-r--r--Monitoring/src/main/python/Driver/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Driver/test.py128
-rw-r--r--Monitoring/src/main/python/Driver/test.py.old122
-rw-r--r--Monitoring/src/main/python/Example/Dimensions$py.classbin0 -> 6325 bytes
-rw-r--r--Monitoring/src/main/python/Example/Dimensions.py42
-rw-r--r--Monitoring/src/main/python/Example/Dimensions.py.old42
-rw-r--r--Monitoring/src/main/python/Example/Metrics$py.classbin0 -> 9470 bytes
-rw-r--r--Monitoring/src/main/python/Example/Metrics.py69
-rw-r--r--Monitoring/src/main/python/Example/Metrics.py.old68
-rw-r--r--Monitoring/src/main/python/Example/Platforms$py.classbin0 -> 3382 bytes
-rw-r--r--Monitoring/src/main/python/Example/Platforms.py9
-rw-r--r--Monitoring/src/main/python/Example/Platforms.py.old9
-rw-r--r--Monitoring/src/main/python/Example/Prefixes$py.classbin0 -> 5468 bytes
-rw-r--r--Monitoring/src/main/python/Example/Prefixes.py23
-rw-r--r--Monitoring/src/main/python/Example/Prefixes.py.old23
-rw-r--r--Monitoring/src/main/python/Example/Resources$py.classbin0 -> 11327 bytes
-rw-r--r--Monitoring/src/main/python/Example/Resources.py61
-rw-r--r--Monitoring/src/main/python/Example/Resources.py.old61
-rw-r--r--Monitoring/src/main/python/Example/Tools$py.classbin0 -> 26660 bytes
-rw-r--r--Monitoring/src/main/python/Example/Tools.py312
-rw-r--r--Monitoring/src/main/python/Example/Tools.py.old311
-rw-r--r--Monitoring/src/main/python/Example/Units$py.classbin0 -> 9282 bytes
-rw-r--r--Monitoring/src/main/python/Example/Units.py81
-rw-r--r--Monitoring/src/main/python/Example/Units.py.old80
-rw-r--r--Monitoring/src/main/python/Example/__init__$py.classbin0 -> 2061 bytes
-rw-r--r--Monitoring/src/main/python/Example/__init__.py0
-rw-r--r--Monitoring/src/main/python/Example/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Example/credentials$py.classbin0 -> 4940 bytes
-rw-r--r--Monitoring/src/main/python/Example/credentials.py27
-rw-r--r--Monitoring/src/main/python/Example/credentials.py.old27
-rw-r--r--Monitoring/src/main/python/Resource/__init__$py.classbin0 -> 2064 bytes
-rw-r--r--Monitoring/src/main/python/Resource/__init__.py0
-rw-r--r--Monitoring/src/main/python/Resource/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Resource/interface$py.classbin0 -> 9278 bytes
-rw-r--r--Monitoring/src/main/python/Resource/interface.py79
-rw-r--r--Monitoring/src/main/python/Resource/interface.py.old78
-rw-r--r--Monitoring/src/main/python/Resource/link$py.classbin0 -> 6078 bytes
-rw-r--r--Monitoring/src/main/python/Resource/link.py33
-rw-r--r--Monitoring/src/main/python/Resource/link.py.old35
-rw-r--r--Monitoring/src/main/python/Resource/node$py.classbin0 -> 8533 bytes
-rw-r--r--Monitoring/src/main/python/Resource/node.py49
-rw-r--r--Monitoring/src/main/python/Resource/node.py.old48
-rw-r--r--Monitoring/src/main/python/Resource/path$py.classbin0 -> 3159 bytes
-rw-r--r--Monitoring/src/main/python/Resource/path.py9
-rw-r--r--Monitoring/src/main/python/Resource/path.py.old9
-rw-r--r--Monitoring/src/main/python/Resource/resource$py.classbin0 -> 6525 bytes
-rw-r--r--Monitoring/src/main/python/Resource/resource.py41
-rw-r--r--Monitoring/src/main/python/Resource/resource.py.old44
-rw-r--r--Monitoring/src/main/python/Resource/slice.py37
-rw-r--r--Monitoring/src/main/python/Resource/slice.py.old40
-rw-r--r--Monitoring/src/main/python/Semantics/InformationModel$py.classbin0 -> 12356 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/InformationModel.py88
-rw-r--r--Monitoring/src/main/python/Semantics/InformationModel.py.old79
-rw-r--r--Monitoring/src/main/python/Semantics/Query$py.classbin0 -> 12155 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/Query.py140
-rw-r--r--Monitoring/src/main/python/Semantics/Query.py.old139
-rw-r--r--Monitoring/src/main/python/Semantics/QueryInterpreter$py.classbin0 -> 18767 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/QueryInterpreter.py272
-rw-r--r--Monitoring/src/main/python/Semantics/QueryInterpreter.py.old223
-rw-r--r--Monitoring/src/main/python/Semantics/TaskModel$py.classbin0 -> 29382 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/TaskModel.py436
-rw-r--r--Monitoring/src/main/python/Semantics/TaskModel.py.old424
-rw-r--r--Monitoring/src/main/python/Semantics/UnitModel$py.classbin0 -> 26117 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/UnitModel.py364
-rw-r--r--Monitoring/src/main/python/Semantics/UnitModel.py.old364
-rw-r--r--Monitoring/src/main/python/Semantics/__init__$py.classbin0 -> 2067 bytes
-rw-r--r--Monitoring/src/main/python/Semantics/__init__.py0
-rw-r--r--Monitoring/src/main/python/Semantics/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Semantics/a0
-rw-r--r--Monitoring/src/main/python/Semantics/b55
-rw-r--r--Monitoring/src/main/python/Semantics/test.py263
-rw-r--r--Monitoring/src/main/python/Semantics/test.py.old250
-rw-r--r--Monitoring/src/main/python/Service/MonSrvImpl.py326
-rw-r--r--Monitoring/src/main/python/Service/MonitoringService$py.classbin0 -> 31900 bytes
-rw-r--r--Monitoring/src/main/python/Service/MonitoringService.py382
-rw-r--r--Monitoring/src/main/python/Service/MonitoringService.py.old354
-rw-r--r--Monitoring/src/main/python/Service/__init__$py.classbin0 -> 2061 bytes
-rw-r--r--Monitoring/src/main/python/Service/__init__.py0
-rw-r--r--Monitoring/src/main/python/Service/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Service/a38
-rw-r--r--Monitoring/src/main/python/Service/b43
-rw-r--r--Monitoring/src/main/python/Service/interface$py.classbin0 -> 23408 bytes
-rw-r--r--Monitoring/src/main/python/Service/interface.py316
-rw-r--r--Monitoring/src/main/python/Service/interface.py.old308
-rw-r--r--Monitoring/src/main/python/Service/mock_framework$py.classbin0 -> 9540 bytes
-rw-r--r--Monitoring/src/main/python/Service/mock_framework.py69
-rw-r--r--Monitoring/src/main/python/Service/mock_framework.py.old69
-rw-r--r--Monitoring/src/main/python/Service/test.py268
-rw-r--r--Monitoring/src/main/python/Service/test.py.old245
-rw-r--r--Monitoring/src/main/python/Task/EventHandler.py17
-rw-r--r--Monitoring/src/main/python/Task/EventHandler.py.old17
-rw-r--r--Monitoring/src/main/python/Task/Task$py.classbin0 -> 34368 bytes
-rw-r--r--Monitoring/src/main/python/Task/Task.py452
-rw-r--r--Monitoring/src/main/python/Task/Task.py.old427
-rw-r--r--Monitoring/src/main/python/Task/__init__$py.classbin0 -> 2052 bytes
-rw-r--r--Monitoring/src/main/python/Task/__init__.py0
-rw-r--r--Monitoring/src/main/python/Task/__init__.py.old0
-rw-r--r--Monitoring/src/main/python/Task/test.py203
-rw-r--r--Monitoring/src/main/python/Task/test.py.old203
-rw-r--r--Monitoring/src/main/python/Util/MonitoringQueryImpl$py.classbin0 -> 10510 bytes
-rw-r--r--Monitoring/src/main/python/Util/MonitoringQueryImpl.py71
-rw-r--r--Monitoring/src/main/python/Util/__init__$py.classbin0 -> 2060 bytes
-rw-r--r--Monitoring/src/main/python/Util/__init__.py0
-rw-r--r--Monitoring/src/main/python/monitoringmodel/__init__$py.classbin0 -> 2085 bytes
-rw-r--r--Monitoring/src/main/python/monitoringmodel/__init__.py0
-rw-r--r--Monitoring/src/main/python/monitoringmodel/config_federica.owl1269
-rw-r--r--Monitoring/src/main/python/monitoringmodel/config_planetlab.owl1457
-rw-r--r--Monitoring/src/main/python/monitoringmodel/config_undefined.owl1457
-rw-r--r--Monitoring/src/main/python/monitoringmodel/im$py.classbin0 -> 3713 bytes
-rw-r--r--Monitoring/src/main/python/monitoringmodel/im.py6
-rw-r--r--Monitoring/src/main/python/monitoringmodel/merged.owl1540
-rw-r--r--Monitoring/src/main/python/monitoringmodel/monitoringQuery_example.owl288
-rw-r--r--Monitoring/src/main/python/monitoringmodel/monitoring_event.owl151
-rw-r--r--Monitoring/src/main/python/monitoringmodel/monitoring_features.owl663
-rw-r--r--Monitoring/src/main/python/monitoringmodel/monitoring_parameters.owl126
-rw-r--r--Monitoring/src/main/python/monitoringmodel/monitoring_query.owl228
-rw-r--r--Monitoring/src/main/python/monitoringmodel/monitoring_stat.owl499
-rw-r--r--Monitoring/src/main/python/monitoringmodel/monitoring_task.owl561
-rw-r--r--Monitoring/src/main/python/monitoringmodel/novi-im.owl1101
-rw-r--r--Monitoring/src/main/python/monitoringmodel/policy_im.owl515
-rw-r--r--Monitoring/src/main/python/monitoringmodel/policy_imV2.owl684
-rw-r--r--Monitoring/src/main/python/monitoringmodel/policy_imV4.owl713
-rw-r--r--Monitoring/src/main/python/monitoringmodel/unit.owl1415
-rw-r--r--Monitoring/src/main/resources/OSGI-INF/blueprint/config.xml54
-rw-r--r--Monitoring/src/main/resources/site-packages/dependencies.pth0
-rw-r--r--Monitoring/src/test/java/eu/novi/monitoring/MonSrv2IT.java251
-rw-r--r--Monitoring/src/test/java/eu/novi/monitoring/MonSrvFEDITdisabled.java113
-rw-r--r--Monitoring/src/test/java/eu/novi/monitoring/MonSrvIT.java323
-rw-r--r--Monitoring/src/test/java/eu/novi/monitoring/MonSrvTest2.java45
-rw-r--r--Monitoring/src/test/python/FedericaTest.py65
-rw-r--r--Monitoring/src/test/python/ServiceTest.py93
-rw-r--r--Monitoring/src/test/python/allTests.py16
-rw-r--r--Monitoring/src/test/python/test.py24
229 files changed, 30230 insertions, 0 deletions
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/Activator.java b/Monitoring/src/main/java/eu/novi/monitoring/Activator.java
new file mode 100644
index 0000000..582dbe8
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/Activator.java
@@ -0,0 +1,128 @@
+package eu.novi.monitoring;
+
+import org.osgi.framework.BundleActivator;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.ServiceReference;
+import org.osgi.util.tracker.ServiceTracker;
+import org.osgi.util.tracker.ServiceTrackerCustomizer;
+
+//import eu.novi.policy.interfaces.InterfaceForMonitoring;
+//import eu.novi.policy.monitoringevents.MonitoringEvents;
+
+//public class Activator implements BundleActivator, ServiceTrackerCustomizer {
+public class Activator implements BundleActivator {
+ private ServiceTracker resourceTracker;
+// private ServiceTracker policyTracker;
+ private BundleContext ctx;
+
+ /**
+ * Implements BundleActivator.start().
+ *
+ * @param bundleContext
+ * - the framework context for the bundle.
+ **/
+ public void start(BundleContext bundleContext) {
+ System.out.println(bundleContext.getBundle().getSymbolicName()
+ + " started");
+ MonSrv service = null;
+ try {
+ service = MonSrvFactory.create(bundleContext);
+ //startTrackingResourceService(bundleContext);
+ } catch (Exception ex) {
+ System.err.println("Resource:" + ex.getMessage());
+ ex.printStackTrace();
+ }
+
+// try {
+// startTrackingPolicyService(bundleContext);
+// } catch (Exception ex) {
+// System.err.println("Policy:" + ex.getMessage());
+// ex.printStackTrace();
+// }
+
+ try {
+ bundleContext
+ .registerService(MonSrv.class.getName(), service, null);
+ } catch (Exception ex) {
+ System.err.println("Register MonSrv:" + ex.getMessage());
+ ex.printStackTrace();
+ }
+ }
+
+ /*(private void startTrackingResourceService(BundleContext context) throws Exception {
+ ctx = context;
+ resourceTracker = new ServiceTracker(context, Resource.class.getName(), this);
+ resourceTracker.open();
+ }*/
+
+// private void startTrackingPolicyService(BundleContext context) throws Exception {
+// ctx = context;
+// policyTracker = new ServiceTracker(context, InterfaceForMonitoring.class.getName(), this);
+// policyTracker.open();
+// }
+
+
+ /**
+ * @see org.osgi.framework.BundleActivator#stop(org.osgi.framework.BundleContext)
+ */
+ public void stop(BundleContext bundleContext) throws Exception {
+ resourceTracker.close();
+ resourceTracker = null;
+// policyTracker.close();
+// policyTracker = null;
+ ctx = null;
+ }
+
+// private void setupPolicy(InterfaceForMonitoring monevents) {
+// final ServiceTracker monSrvTracker = new ServiceTracker(ctx, MonSrv.class.getName(), null);
+// monSrvTracker.open();
+// MonSrv service = (MonSrv) monSrvTracker.getService();
+// service.setPolicy(monevents);
+// monSrvTracker.close();
+// }
+
+ /*
+ * THIS IS QUITE OLD CODE, SHOULD IT BE REMOVED?
+ *
+ private void setupResource(Resource resource) {
+ final ServiceTracker monSrvTracker = new ServiceTracker(ctx, MonSrv.class.getName(), null);
+ monSrvTracker.open();
+ MonSrv service = (MonSrv) monSrvTracker.getService();
+ service.setResource(resource);
+ monSrvTracker.close();
+ }
+
+ @Override
+ public Object addingService(ServiceReference ref) {
+ final Object obj = ctx.getService(ref);
+ if ( obj instanceof Resource ) {
+ final Resource resource = (Resource) obj;
+ setupResource(resource);
+ }
+// else if ( obj instanceof InterfaceForMonitoring ) {
+// final InterfaceForMonitoring monevents = (InterfaceForMonitoring) obj;
+// setupPolicy(monevents);
+// }
+ return obj;
+ }
+
+ @Override
+ public void modifiedService(ServiceReference ref, Object service) {
+ if ( service instanceof Resource ) {
+ setupResource((Resource) service);
+ }
+// else if ( service instanceof InterfaceForMonitoring ) {
+// setupPolicy((InterfaceForMonitoring) service);
+// }
+ }
+
+ @Override
+ public void removedService(ServiceReference ref, Object service) {
+ if ( service instanceof Resource ) {
+ setupResource((Resource) service);
+ }
+// else if ( service instanceof InterfaceForMonitoring ) {
+// setupPolicy((InterfaceForMonitoring) service);
+// }
+ }*/
+}
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/MonDiscovery.java b/Monitoring/src/main/java/eu/novi/monitoring/MonDiscovery.java
new file mode 100644
index 0000000..77f1b8f
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/MonDiscovery.java
@@ -0,0 +1,22 @@
+package eu.novi.monitoring;
+
+import eu.novi.monitoring.MonSrv;
+import java.util.List;
+
+
+public interface MonDiscovery {
+
+ public void setMonSrvList(List<MonSrv> monSrvList);
+
+ public List<MonSrv> getMonSrvList();
+
+ public MonSrv getInterface(String testbed);
+
+ public MonSrv getService(String testbed);
+
+ public void setTestbed(String testbed);
+
+ public String getTestbed();
+
+}
+
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/MonDiscoveryImpl.java b/Monitoring/src/main/java/eu/novi/monitoring/MonDiscoveryImpl.java
new file mode 100644
index 0000000..300c68e
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/MonDiscoveryImpl.java
@@ -0,0 +1,104 @@
+package eu.novi.monitoring;
+
+import eu.novi.monitoring.MonSrv;
+import java.util.List;
+import java.util.ArrayList;
+
+import org.osgi.framework.Bundle;
+import org.osgi.framework.BundleContext;
+import org.osgi.framework.FrameworkUtil;
+import org.osgi.framework.InvalidSyntaxException;
+import org.osgi.framework.ServiceReference;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class MonDiscoveryImpl implements MonDiscovery {
+
+ private static final transient Logger log =
+ LoggerFactory.getLogger(MonDiscoveryImpl.class);
+
+
+ private List<MonSrv> monSrvList = new ArrayList<MonSrv>();
+
+ private String testbed = "Unknown";
+
+ public MonDiscoveryImpl() {
+
+ }
+
+ @Override
+ public void setMonSrvList(List<MonSrv> monSrvList) {
+ this.monSrvList = monSrvList;
+ }
+
+ @Override
+ public List<MonSrv> getMonSrvList() {
+ return monSrvList;
+ }
+
+ @Override
+ public void setTestbed(String testbed) {
+ this.testbed = testbed;
+ }
+
+ @Override
+ public String getTestbed() {
+ return testbed;
+ }
+
+ @Override
+ public MonSrv getInterface(String testbed) {
+ return this.getService(testbed);
+ }
+
+ @Override
+ public MonSrv getService(String testbed) {
+ MonSrv result = null;
+
+ if ((monSrvList !=null) && (monSrvList.size() > 0)) {
+ for (MonSrv msrv : monSrvList) {
+ if (msrv != null) {
+ if (msrv.getPlatform().equals(testbed)) return msrv;
+ }
+ }
+ }
+ log.warn("The monSrvList does not contain MonSrv on the testbed:" + testbed);
+ return getServiceBundle(testbed);
+ }
+
+ public MonSrv getServiceBundle(String testbed)
+ {
+ try {
+ Bundle bundle = null;
+ bundle = FrameworkUtil.getBundle(MonDiscoveryImpl.class);
+ BundleContext ctx = bundle.getBundleContext();
+ ctx.getServiceReferences(null, null);
+ ServiceReference [] monSrvs = ctx.getServiceReferences(MonSrv.class.getName(), null);
+ if(monSrvs == null || monSrvs.length == 0){
+ log.error("Cannot get MonSrv from bundle context. MonSrv is null or empty");
+ return null;
+ }else{
+ for(int i = 0; i < monSrvs.length; i++){
+ ServiceReference serviceReference = (ServiceReference)monSrvs[i];
+ MonSrv msrv = (MonSrv) ctx.getService(serviceReference);
+ if( msrv.getPlatform().equals(testbed) ) {
+ return msrv;
+ }
+ }
+ log.error("Cannot get MonSrv. There is no service on testbed:" + testbed);
+ }
+ } catch(NoClassDefFoundError e1) {
+ log.error("Problem to get the bundle of class: "+MonSrv.class.getName());
+ e1.printStackTrace();
+ return null;
+ } catch (InvalidSyntaxException e) {
+ log.error("Problem to get service reference from context");
+ e.printStackTrace();
+ return null;
+ }
+ return null;
+ }
+
+}
+
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/MonSrv.java b/Monitoring/src/main/java/eu/novi/monitoring/MonSrv.java
new file mode 100644
index 0000000..34f9578
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/MonSrv.java
@@ -0,0 +1,86 @@
+package eu.novi.monitoring;
+
+import eu.novi.monitoring.credential.*;
+import eu.novi.monitoring.util.*;
+import java.util.List;
+//import eu.novi.resources.Resource;
+//import eu.novi.policy.interfaces.InterfaceForMonitoring;
+
+
+public interface MonSrv {
+ // Tester function
+ public List<String> echo(String message);
+
+ public MonitoringQuery createQuery();
+
+ public String getPlatform();
+
+// public InterfaceForMonitoring getPolicy();
+
+// public void setPolicy(InterfaceForMonitoring monevents);
+
+ public String measure(Credential credential, String query);
+
+ public String substrate(Credential credential, String query);
+
+// public String substrateFB(Credential credential, String query, String sessionID);
+
+ public String sliceTasks(Credential credential, String query);
+
+ public String addTask(Credential credential, String query);
+
+ public String describeTaskData(Credential credential, String query);
+
+ public String fetchTaskData(Credential credential, String query);
+
+ public String modifyTask(Credential credential, String query);
+
+ public String removeTask(Credential credential, String query);
+
+ public String enableTask(Credential credential, String query);
+
+ public String disableTask(Credential credential, String query);
+
+ public boolean getTaskStatus(Credential credential, String query);
+
+ public String addAggregator(Credential credential, String query);
+
+ public String removeAggregator(Credential credential, String query);
+
+ public String fetchAggregatorData(Credential credential, String query);
+
+ public String addCondition(Credential credential, String query);
+
+ public String removeCondition(Credential credential, String query);
+
+ public String modifyCondition(Credential credential, String query);
+
+
+ // Substrate monitoring function
+// public String measure(Scredential, query):
+// pass
+//
+// # Slice monitoring functions
+// def addTask(self, credential, query):
+// pass
+//
+// def modifyTask(self, credential, query):
+// pass
+//
+// def removeTask(self, credential, query):
+// pass
+//
+// def getTaskStatus(self, credential, query):
+// pass
+//
+// def addCondition(self, credential, query):
+// pass
+//
+// def modifyCondition(self, credential, query):
+// pass
+//
+// def removeCondition(self, credential, query):
+// pass
+//
+
+}
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/MonSrvFactory.java b/Monitoring/src/main/java/eu/novi/monitoring/MonSrvFactory.java
new file mode 100644
index 0000000..82a75af
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/MonSrvFactory.java
@@ -0,0 +1,12 @@
+package eu.novi.monitoring;
+
+import org.osgi.framework.BundleContext;
+
+import eu.novi.python.integration.JythonObjectFactory;
+
+public class MonSrvFactory {
+
+ public static MonSrv create(BundleContext ctx) throws ClassNotFoundException {
+ return JythonObjectFactory.createObject(MonSrv.class, "Service.MonSrvImpl", ctx);
+ }
+}
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/Wiring.java b/Monitoring/src/main/java/eu/novi/monitoring/Wiring.java
new file mode 100644
index 0000000..f618a0f
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/Wiring.java
@@ -0,0 +1,14 @@
+package eu.novi.monitoring;
+
+import java.util.Map;
+//import eu.novi.feedback.event.ReportEvent;
+
+public interface Wiring {
+ void setTestbed(String name);
+ String getTestbed();
+// void setUserFeedback(ReportEvent userFeedback);
+// ReportEvent getUserFeedback();
+ void addService(MonSrv service, Map<String, Object> properties);
+ void removeService(MonSrv service);
+
+}
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/credential/Credential.java b/Monitoring/src/main/java/eu/novi/monitoring/credential/Credential.java
new file mode 100644
index 0000000..eb917f3
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/credential/Credential.java
@@ -0,0 +1,5 @@
+package eu.novi.monitoring.credential;
+
+public abstract class Credential {
+ public abstract String getType();
+}
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/credential/UsernamePassword.java b/Monitoring/src/main/java/eu/novi/monitoring/credential/UsernamePassword.java
new file mode 100644
index 0000000..36da4eb
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/credential/UsernamePassword.java
@@ -0,0 +1,17 @@
+package eu.novi.monitoring.credential;
+
+public class UsernamePassword extends Credential {
+ public String username;
+ public String password;
+
+ public UsernamePassword(String username, String password)
+ {
+ this.username = username;
+ this.password = password;
+ }
+
+ public String getType()
+ {
+ return "UsernamePassword";
+ }
+}
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/credential/UsernameRSAKey.java b/Monitoring/src/main/java/eu/novi/monitoring/credential/UsernameRSAKey.java
new file mode 100644
index 0000000..3320ca6
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/credential/UsernameRSAKey.java
@@ -0,0 +1,19 @@
+package eu.novi.monitoring.credential;
+
+public class UsernameRSAKey extends Credential {
+ public String username;
+ public String password;
+ public String RSAKey;
+
+ public UsernameRSAKey(String username, String RSAKey, String password)
+ {
+ this.username = username;
+ this.RSAKey = RSAKey;
+ this.password = password;
+ }
+
+ public String getType()
+ {
+ return "UsernameRSAKey";
+ }
+}
diff --git a/Monitoring/src/main/java/eu/novi/monitoring/util/MonitoringQuery.java b/Monitoring/src/main/java/eu/novi/monitoring/util/MonitoringQuery.java
new file mode 100644
index 0000000..61f5e48
--- /dev/null
+++ b/Monitoring/src/main/java/eu/novi/monitoring/util/MonitoringQuery.java
@@ -0,0 +1,11 @@
+package eu.novi.monitoring.util;
+
+public interface MonitoringQuery {
+
+ public void addFeature( String queryName, String feature);
+ public void addResource( String queryName, String resourceName, String resourceType);
+ public void addInterface( String resourceName, String interfaceName, String interfaceType);
+ public void defineInterface(String interfaceName, String address, String addressType);
+ public String serialize();
+}
+
diff --git a/Monitoring/src/main/python/Credential/SshKeygen.py b/Monitoring/src/main/python/Credential/SshKeygen.py
new file mode 100644
index 0000000..7998575
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/SshKeygen.py
@@ -0,0 +1,99 @@
+from __future__ import with_statement
+'''
+Created on Jul 20, 2011
+
+@author: steger
+'''
+from M2Crypto import RSA
+from base64 import b64encode
+from os import chmod, path
+import stat
+
+# paramiko provides this functionality, so maybe we don't need this class. see paramiko.PKey
+
class CannotSet(Exception):
    '''
    @summary: raised when one of the read-only key properties is assigned to or deleted
    '''
    pass
+
class SshKeygen(object):
    '''
    Generates a pair of RSA keys.
    Enables saving the keys to the file system.

    The private key is exposed in PEM format, the public key in the base64
    form used by OpenSSH authorized_keys files; both are read-only
    properties whose setters/deleters raise CannotSet.
    '''
    def __init__(self, bits = 1024, e = 65537):
        '''
        Initiates the pair of RSA keys
        @param bits: the length of the keys in bits
        @type bits: integer
        @param e: the public exponent; defaults to 65537 (2**16 + 1), the
            conventional RSA exponent.  (The former default of 65337 was a
            typo: it is composite and not a recommended exponent.)
        @type e: integer
        '''
        # the no-op callback silences M2Crypto's key-generation progress output
        self.rsa = RSA.gen_key(bits, e, lambda: None)

    def _check_filename(self, filename):
        # refuse to overwrite existing key material
        if path.exists(filename):
            raise Exception("File exists: %s" % filename)

    def _get_private(self):
        '''
        @summary: return the private key in PEM format
        @return: the private key in PEM format
        @rtype: string
        '''
        return self.rsa.as_pem(cipher = None)

    def _set_private(self, value):
        raise CannotSet

    def _del_private(self):
        raise CannotSet

    @staticmethod
    def _convert(rsa):
        # rsa.pub() yields the (e, n) pair; prepend the length-prefixed
        # "ssh-rsa" tag to obtain the SSH public key wire format
        return b64encode('\x00\x00\x00\x07ssh-rsa%s%s' % (rsa.pub()[0], rsa.pub()[1]))

    def _get_public(self):
        '''
        @summary: return the public key in base64 format conforming to the content of authorized_keys
        @return: the public key in base64 format
        @rtype: string
        '''
        return self._convert(self.rsa)

    def _set_public(self, value):
        raise CannotSet

    def _del_public(self):
        raise CannotSet

    def save_private_key(self, filename):
        '''
        @summary: save the private key in the file system in a named file.
        @param filename: the filename to store the private key.
        @type filename: string
        '''
        self._check_filename(filename)
        self.rsa.save_key(filename, cipher = None)
        # the private key must only be readable by its owner
        chmod(filename, stat.S_IRUSR)

    def save_public_key(self, filename):
        '''
        @summary: save the public key in the file system in a named file.
        @param filename: the filename to store the public key.
        @type filename: string
        '''
        self._check_filename(filename)
        with open(filename, "w") as f:
            f.write("ssh-rsa %s" % self.public)

    @staticmethod
    def convert_key_from_file(filename):
        '''
        @summary: convert a private key stored in a file in PEM format and return the public key in base64 format conforming to the content of authorized_keys
        @return: the public key in base64 format
        @rtype: string
        '''
        return SshKeygen._convert( RSA.load_key(file = filename) )

    public = property(_get_public,_set_public,_del_public)

    private = property(_get_private,_set_private,_del_private)
diff --git a/Monitoring/src/main/python/Credential/SshKeygen.py.old b/Monitoring/src/main/python/Credential/SshKeygen.py.old
new file mode 100644
index 0000000..6c8042d
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/SshKeygen.py.old
@@ -0,0 +1,100 @@
+'''
+Created on Jul 20, 2011
+
+@author: steger
+'''
+from M2Crypto import RSA
+from base64 import b64encode
+from os import chmod, path
+import stat
+
+# paramiko provides this functionality, so maybe we don't need this class. see paramiko.PKey
+
class CannotSet(Exception):
    '''
    @summary: raised when one of the read-only key properties is assigned to or deleted
    '''
    pass
+
class SshKeygen(object):
    '''
    Generates a pair of RSA keys.
    Enables saving the keys to the file system.

    The private key is exposed in PEM format, the public key in the base64
    form used by OpenSSH authorized_keys files; both are read-only
    properties whose setters/deleters raise CannotSet.
    '''
    def __init__(self, bits = 1024, e = 65537):
        '''
        Initiates the pair of RSA keys
        @param bits: the length of the keys in bits
        @type bits: integer
        @param e: the public exponent; defaults to 65537 (2**16 + 1), the
            conventional RSA exponent.  (The former default of 65337 was a
            typo: it is composite and not a recommended exponent.)
        @type e: integer
        '''
        # the no-op callback silences M2Crypto's key-generation progress output
        self.rsa = RSA.gen_key(bits, e, lambda: None)

    def _check_filename(self, filename):
        # refuse to overwrite existing key material
        if path.exists(filename):
            raise Exception("File exists: %s" % filename)

    @property
    def private(self):
        '''
        @summary: return the private key in PEM format
        @return: the private key in PEM format
        @rtype: string
        '''
        return self.rsa.as_pem(cipher = None)

    @private.setter
    def private(self, value):
        raise CannotSet

    @private.deleter
    def private(self):
        raise CannotSet

    @staticmethod
    def _convert(rsa):
        # rsa.pub() yields the (e, n) pair; prepend the length-prefixed
        # "ssh-rsa" tag to obtain the SSH public key wire format
        return b64encode('\x00\x00\x00\x07ssh-rsa%s%s' % (rsa.pub()[0], rsa.pub()[1]))

    @property
    def public(self):
        '''
        @summary: return the public key in base64 format conforming to the content of authorized_keys
        @return: the public key in base64 format
        @rtype: string
        '''
        return self._convert(self.rsa)

    @public.setter
    def public(self, value):
        raise CannotSet

    @public.deleter
    def public(self):
        raise CannotSet

    def save_private_key(self, filename):
        '''
        @summary: save the private key in the file system in a named file.
        @param filename: the filename to store the private key.
        @type filename: string
        '''
        self._check_filename(filename)
        self.rsa.save_key(filename, cipher = None)
        # the private key must only be readable by its owner
        chmod(filename, stat.S_IRUSR)

    def save_public_key(self, filename):
        '''
        @summary: save the public key in the file system in a named file.
        @param filename: the filename to store the public key.
        @type filename: string
        '''
        self._check_filename(filename)
        with open(filename, "w") as f:
            f.write("ssh-rsa %s" % self.public)

    @staticmethod
    def convert_key_from_file(filename):
        '''
        @summary: convert a private key stored in a file in PEM format and return the public key in base64 format conforming to the content of authorized_keys
        @return: the public key in base64 format
        @rtype: string
        '''
        return SshKeygen._convert( RSA.load_key(file = filename) )
diff --git a/Monitoring/src/main/python/Credential/__init__$py.class b/Monitoring/src/main/python/Credential/__init__$py.class
new file mode 100644
index 0000000..c1e5c83
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Credential/__init__.py b/Monitoring/src/main/python/Credential/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/__init__.py
diff --git a/Monitoring/src/main/python/Credential/__init__.py.old b/Monitoring/src/main/python/Credential/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/__init__.py.old
diff --git a/Monitoring/src/main/python/Credential/credentialtypes$py.class b/Monitoring/src/main/python/Credential/credentialtypes$py.class
new file mode 100644
index 0000000..e23dd34
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/credentialtypes$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Credential/credentialtypes.py b/Monitoring/src/main/python/Credential/credentialtypes.py
new file mode 100644
index 0000000..5af53b5
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/credentialtypes.py
@@ -0,0 +1,52 @@
+'''
+Created on Oct 27, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
class Credential(object):
    '''
    @summary: an empty credential serving as the common ancestor class of
        all credential containers
    @author: steger, jozsef
    '''
    pass
+
class UsernamePassword(Credential):
    '''
    @summary: credential holding a user name together with its password
    @author: steger, jozsef
    '''

    def __init__(self, username, password):
        '''
        @summary: store the user name / password pair
        @param username: the account name
        @type username: string
        @param password: the secret belonging to the account
        @type password: string
        '''
        self.password = password
        self.username = username
+
class UsernameRSAKey(Credential):
    '''
    @summary: credential made up of a user name, a private RSA key and an
        optional password protecting that key
    @author: steger, jozsef
    '''

    def __init__(self, username, rsakey, password = ""):
        '''
        @summary: store the user name, the private key and the key password
        @param username: the account name
        @type username: string
        @param rsakey: the private key
        @type rsakey: string
        @param password: optional password unlocking the private key, default: ""
        @type password: string
        '''
        self.password = password
        self.rsakey = rsakey
        self.username = username
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Credential/credentialtypes.py.old b/Monitoring/src/main/python/Credential/credentialtypes.py.old
new file mode 100644
index 0000000..5af53b5
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/credentialtypes.py.old
@@ -0,0 +1,52 @@
+'''
+Created on Oct 27, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
class Credential(object):
    '''
    @summary: an empty credential serving as the common ancestor class of
        all credential containers
    @author: steger, jozsef
    '''
    pass
+
class UsernamePassword(Credential):
    '''
    @summary: credential holding a user name together with its password
    @author: steger, jozsef
    '''

    def __init__(self, username, password):
        '''
        @summary: store the user name / password pair
        @param username: the account name
        @type username: string
        @param password: the secret belonging to the account
        @type password: string
        '''
        self.password = password
        self.username = username
+
class UsernameRSAKey(Credential):
    '''
    @summary: credential made up of a user name, a private RSA key and an
        optional password protecting that key
    @author: steger, jozsef
    '''

    def __init__(self, username, rsakey, password = ""):
        '''
        @summary: store the user name, the private key and the key password
        @param username: the account name
        @type username: string
        @param rsakey: the private key
        @type rsakey: string
        @param password: optional password unlocking the private key, default: ""
        @type password: string
        '''
        self.password = password
        self.rsakey = rsakey
        self.username = username
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Credential/test.py b/Monitoring/src/main/python/Credential/test.py
new file mode 100644
index 0000000..04a59f3
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/test.py
@@ -0,0 +1,46 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+from os import close, unlink
+from tempfile import mkstemp
+from subprocess import Popen, PIPE
+from SshKeygen import SshKeygen
+
class Test(unittest.TestCase):
    '''
    @summary: end-to-end check of SshKeygen: generate a key pair, save it,
        then compare the derived public key with the output of the external
        ssh-keygen tool and with convert_key_from_file
    '''
    # absolute path of the reference ssh-keygen binary; assumes a POSIX host -- TODO confirm
    sshkeygen = '/usr/bin/ssh-keygen'

    def test_sshkeygen(self):
        # generate a pair of RSA keys
        self.key = SshKeygen()

        # save keys in a file
        # (mkstemp is only used to obtain a unique name; the file itself is
        # removed so that SshKeygen's overwrite check does not trigger)
        fid, fn = mkstemp(suffix = "_rsa")
        close(fid)
        unlink(fn)
        self.fn_private = fn
        self.fn_public = "%s.pub" % fn
        self.key.save_private_key(self.fn_private)
        self.key.save_public_key(self.fn_public)

        # Test the base64 format of the public key.
        # convert and compare private key using ssh-keygen
        proc = Popen(args = [self.sshkeygen, '-y', '-f', self.fn_private], stdout = PIPE)
        converted = str(proc.communicate(input = None)[0])
        expected = "ssh-rsa %s\n" % self.key.public
        self.assertEqual(expected, converted, "Base64 encoded public RSA key differs from the one generated by %s" % self.sshkeygen)

        # Test SshKeygen objects convert_key_from_file method.
        expected = self.key.public
        converted = SshKeygen.convert_key_from_file(self.fn_private)
        self.assertEqual(expected, converted, "Base64 encoded public RSA key generated from file %s differs from expected" % self.fn_private)

        # remove generated files
        unlink(self.fn_private)
        unlink(self.fn_public)
+
if __name__ == "__main__":
    # run the unit tests when the module is executed directly
    #import sys;sys.argv = ['', 'Test.test_sshkeygen']
    unittest.main()
diff --git a/Monitoring/src/main/python/Credential/test.py.old b/Monitoring/src/main/python/Credential/test.py.old
new file mode 100644
index 0000000..04a59f3
--- /dev/null
+++ b/Monitoring/src/main/python/Credential/test.py.old
@@ -0,0 +1,46 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+from os import close, unlink
+from tempfile import mkstemp
+from subprocess import Popen, PIPE
+from SshKeygen import SshKeygen
+
class Test(unittest.TestCase):
    '''
    @summary: end-to-end check of SshKeygen: generate a key pair, save it,
        then compare the derived public key with the output of the external
        ssh-keygen tool and with convert_key_from_file
    '''
    # absolute path of the reference ssh-keygen binary; assumes a POSIX host -- TODO confirm
    sshkeygen = '/usr/bin/ssh-keygen'

    def test_sshkeygen(self):
        # generate a pair of RSA keys
        self.key = SshKeygen()

        # save keys in a file
        # (mkstemp is only used to obtain a unique name; the file itself is
        # removed so that SshKeygen's overwrite check does not trigger)
        fid, fn = mkstemp(suffix = "_rsa")
        close(fid)
        unlink(fn)
        self.fn_private = fn
        self.fn_public = "%s.pub" % fn
        self.key.save_private_key(self.fn_private)
        self.key.save_public_key(self.fn_public)

        # Test the base64 format of the public key.
        # convert and compare private key using ssh-keygen
        proc = Popen(args = [self.sshkeygen, '-y', '-f', self.fn_private], stdout = PIPE)
        converted = str(proc.communicate(input = None)[0])
        expected = "ssh-rsa %s\n" % self.key.public
        self.assertEqual(expected, converted, "Base64 encoded public RSA key differs from the one generated by %s" % self.sshkeygen)

        # Test SshKeygen objects convert_key_from_file method.
        expected = self.key.public
        converted = SshKeygen.convert_key_from_file(self.fn_private)
        self.assertEqual(expected, converted, "Base64 encoded public RSA key generated from file %s differs from expected" % self.fn_private)

        # remove generated files
        unlink(self.fn_private)
        unlink(self.fn_public)
+
if __name__ == "__main__":
    # run the unit tests when the module is executed directly
    #import sys;sys.argv = ['', 'Test.test_sshkeygen']
    unittest.main()
diff --git a/Monitoring/src/main/python/DataProcessing/Aggregator$py.class b/Monitoring/src/main/python/DataProcessing/Aggregator$py.class
new file mode 100644
index 0000000..452e4b9
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Aggregator$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/Aggregator.py b/Monitoring/src/main/python/DataProcessing/Aggregator.py
new file mode 100644
index 0000000..7e01145
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Aggregator.py
@@ -0,0 +1,265 @@
+from __future__ import with_statement
+'''
+Created on Aug 10, 2011
+
+@author: steger, gombos, matuszka
+'''
+
+from DataProcessing.MeasurementLevel import Ordinal, Ratio, Interval #Nominal
+from math import sqrt
+from DataProcessing.DataReader import DataReader
+from DataProcessing.DataHeader import DataHeader, DataHeaderCell
+from DataProcessing.DataSource import DataSource
+from DataProcessing.Data import Data
+from DataProcessing.DataError import AggregatorError
+
class Aggregator(DataSource):
    '''
    @summary: base class of column-wise aggregators.  It projects a single
        column out of the wrapped DataSource and maintains a one-record
        derived table holding the aggregate of that column together with
        the item count.  Sub classes supply the name and
        dimension_compatible properties and the _process() implementation.
    '''
    # column name of the cell holding the number of aggregated items
    cn_count = 'Count'

    def __init__(self, datasource, cellrequest):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param cellrequest: a column wise projection of the table is carried out, this column is kept
        @type cellrequest: CellRequest
        '''
        if not isinstance(datasource, DataSource):
            raise AggregatorError("Wrong type of datasource %s" % datasource)
        DataSource.__init__(self, datasource)
        self._reader = DataReader(datasource = datasource._data)
        self._reader.extract(cellrequest = [cellrequest])
        # pick the first header cell matching the projection
        # NOTE(review): if the projection matches nothing, c stays unbound
        # and the level() check below raises NameError -- confirm callers
        # always request an existing column
        for c in self._reader.headercells():
            break
        if not c.dimension.level(self.dimension_compatible):
            raise AggregatorError("The measurement level of input (%s) is not compatible with %s" % (c.dimension, self.name))
        header = DataHeader("%sAggregate(%s)" % (self.name, self.source.name))
        dimension = c.dimension
        header.addColumn(DataHeaderCell(name = self.cn_count, dimension = dimension.manager["Countable"]))
        self.cn_aggr = '%s(%s)' % (self.name, c.name)
        header.addColumn(DataHeaderCell(name = self.cn_aggr, dimension = dimension, unit = c.unit))
        self._data = Data(self.um, header)
        # reusable single-row record template used to publish results
        self._record = self._data.getTemplate(size = 1)

    def _get_dimension_compatible(self):
        # abstract: sub classes return the minimal measurement level they accept
        raise AggregatorError("dimension_compatible property is not implemented in %s" % self)



    dimension_compatible = property(_get_dimension_compatible,None,None)
class Sum(Aggregator):
    '''
    @summary: aggregates a column by summing its values; requires at least
        interval scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # running total over all records consumed so far
        self._aggregate = 0

    def _get_dimension_compatible(self):
        return Interval

    def _get_name(self):
        return "Sum"

    def _process(self):
        # restart the running total if the source has been cleared meanwhile
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            self._aggregate = 0
            self._reader.rewind()
            changed = True
        else:
            changed = False
        # consume only the newly arrived records, incrementally
        for (x,) in self._reader:
            self._aggregate += float(x)
            changed = True
        if changed:
            # publish the new aggregate as a fresh single-row table
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)


    dimension_compatible = property(_get_dimension_compatible,None,None)

    name = property(_get_name,None,None)
class Min(Aggregator):
    '''
    @summary: aggregates a column into its minimum; requires at least
        ordinal scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # smallest value seen so far; None until the first record arrives
        self._aggregate = None

    def _get_dimension_compatible(self):
        return Ordinal

    def _get_name(self):
        return "Min"

    def _process(self):
        changed = False
        if self._aggregate is None:
            # consume exactly one record to seed the running minimum
            for (x,) in self._reader:
                changed = True
                self._aggregate = float(x)
                break
        sample = [self._aggregate]
        for (x,) in self._reader:
            sample.append( float(x) )
            changed = True
        if changed:
            self._aggregate = min(sample)
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)


    dimension_compatible = property(_get_dimension_compatible,None,None)

    name = property(_get_name,None,None)
class Max(Aggregator):
    '''
    @summary: aggregates a column into its maximum; requires at least
        ordinal scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # largest value seen so far; None until the first record arrives
        self._aggregate = None

    def _get_dimension_compatible(self):
        return Ordinal

    def _get_name(self):
        return "Max"

    def _process(self):
        changed = False
        if self._aggregate is None:
            # consume exactly one record to seed the running maximum
            for (x,) in self._reader:
                changed = True
                self._aggregate = float(x)
                break
        sample = [self._aggregate]
        for (x,) in self._reader:
            sample.append( float(x) )
            changed = True
        if changed:
            self._aggregate = max(sample)
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)


    dimension_compatible = property(_get_dimension_compatible,None,None)

    name = property(_get_name,None,None)
class Mean(Aggregator):
    '''
    @summary: aggregates a column into its arithmetic mean; requires ratio
        scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # running sum of all values consumed so far
        self._sum = 0
        self._aggregate = None

    def _get_dimension_compatible(self):
        return Ratio

    def _get_name(self):
        return "Mean"

    def _process(self):
        changed = False
        for (x,) in self._reader:
            self._sum += float(x)
            changed = True
        if changed:
            # NOTE(review): assumes len(self.source) equals the number of
            # records folded into _sum -- confirm against DataSource semantics
            self._aggregate = self._sum / float(len(self.source))
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)


    dimension_compatible = property(_get_dimension_compatible,None,None)

    name = property(_get_name,None,None)
class Deviation(Aggregator):
    '''
    @summary: aggregates a column into its standard deviation; requires
        ratio scale measurements.  The empirical flag selects the corrected
        (sample, n-1 denominator) estimator, otherwise the population
        (n denominator) estimator is used.
    '''
    def __init__(self, data, cellrequest):
        Aggregator.__init__(self, data, cellrequest)
        self._aggregate = None
        # True: Bessel-corrected sample standard deviation
        self._emp = True

    def _get_empirical(self):
        return self._emp
    def _set_empirical(self, emp):
        self._emp = bool(emp)

    def _get_dimension_compatible(self):
        return Ratio

    def _get_name(self):
        return "StdDev"

    def _process(self):
        changed = False
        aggr = 0
        data = []
        self._reader.rewind()
        for (x,) in self._reader:
            x = float(x)
            aggr += x
            data.append(x)
            changed = True
        if changed:
            n = float(len(data))
            avg = aggr / n
            s2 = map(lambda x: (x-avg)*(x-avg), data)
            if self.empirical:
                # Bessel's correction divides by n-1 (the original code
                # divided by n+1, which is neither the sample nor the
                # population estimator); a single item has no defined
                # sample deviation, report 0 in that degenerate case
                self._aggregate = sqrt(sum(s2) / (n-1)) if n > 1 else 0.0
            else:
                self._aggregate = sqrt(sum(s2) / n)
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)


    dimension_compatible = property(_get_dimension_compatible,None,None)

    empirical = property(_get_empirical,_set_empirical,None)

    name = property(_get_name,None,None)
class Percentile(Aggregator):
    '''
    @summary: aggregates a column into a given percentile (default: 75%);
        requires at least ordinal scale measurements
    '''
    def __init__(self, data, cellrequest):
        self._aggregate = None
        # requested percentile as a fraction in [0, 1]
        self._percentile = .75
        Aggregator.__init__(self, data, cellrequest)

    def _get_percentile(self):
        return self._percentile
    def _set_percentile(self, percentile):
        # clamp into the meaningful [0, 1] interval
        self._percentile = max(0, min(1, float(percentile)))

    def _get_dimension_compatible(self):
        return Ordinal

    def _get_name(self):
        return "Percentile_%d%%" % int(round(100 * self.percentile))

    def _process(self):
        data = []
        self._reader.rewind()
        for (x,) in self._reader:
            # convert like the other aggregators do, so that the sort is
            # numeric (not lexicographic) and the midpoint average below
            # is well defined
            data.append(float(x))
        if not data:
            # nothing to aggregate yet; the original indexed into the
            # empty list and raised IndexError here
            return
        data.sort()
        n = len(data)
        p = int((n - 1) * self.percentile)
        if n % 2:
            val = data[p]
        else:
            # clamp the neighbour index: p+1 would run off the end for
            # the 100th percentile of an even-sized sample
            val = .5 * (data[p] + data[min(p + 1, n - 1)])
        if self._aggregate != val:
            self._aggregate = val
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)

    dimension_compatible = property(_get_dimension_compatible,None,None)

    percentile = property(_get_percentile,_set_percentile,None)

    name = property(_get_name,None,None)
diff --git a/Monitoring/src/main/python/DataProcessing/Aggregator.py.old b/Monitoring/src/main/python/DataProcessing/Aggregator.py.old
new file mode 100644
index 0000000..f50b4d2
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Aggregator.py.old
@@ -0,0 +1,251 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger, gombos, matuszka
+'''
+
+from DataProcessing.MeasurementLevel import Ordinal, Ratio, Interval #Nominal
+from math import sqrt
+from DataProcessing.DataReader import DataReader
+from DataProcessing.DataHeader import DataHeader, DataHeaderCell
+from DataProcessing.DataSource import DataSource
+from DataProcessing.Data import Data
+from DataProcessing.DataError import AggregatorError
+
class Aggregator(DataSource):
    '''
    @summary: base class of column-wise aggregators.  It projects a single
        column out of the wrapped DataSource and maintains a one-record
        derived table holding the aggregate of that column together with
        the item count.  Sub classes supply the name and
        dimension_compatible properties and the _process() implementation.
    '''
    # column name of the cell holding the number of aggregated items
    cn_count = 'Count'

    def __init__(self, datasource, cellrequest):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param cellrequest: a column wise projection of the table is carried out, this column is kept
        @type cellrequest: CellRequest
        '''
        if not isinstance(datasource, DataSource):
            raise AggregatorError("Wrong type of datasource %s" % datasource)
        DataSource.__init__(self, datasource)
        self._reader = DataReader(datasource = datasource._data)
        self._reader.extract(cellrequest = [cellrequest])
        # pick the first header cell matching the projection
        # NOTE(review): if the projection matches nothing, c stays unbound -- confirm
        for c in self._reader.headercells():
            break
        if not c.dimension.level(self.dimension_compatible):
            raise AggregatorError("The measurement level of input (%s) is not compatible with %s" % (c.dimension, self.name))
        header = DataHeader("%sAggregate(%s)" % (self.name, self.source.name))
        dimension = c.dimension
        header.addColumn(DataHeaderCell(name = self.cn_count, dimension = dimension.manager["Countable"]))
        self.cn_aggr = '%s(%s)' % (self.name, c.name)
        header.addColumn(DataHeaderCell(name = self.cn_aggr, dimension = dimension, unit = c.unit))
        self._data = Data(self.um, header)
        # reusable single-row record template used to publish results
        self._record = self._data.getTemplate(size = 1)

    @property
    def dimension_compatible(self):
        # abstract: sub classes return the minimal measurement level they accept
        raise AggregatorError("dimension_compatible property is not implemented in %s" % self)
+
+
class Sum(Aggregator):
    '''
    @summary: aggregates a column by summing its values; requires at least
        interval scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # running total over all records consumed so far
        self._aggregate = 0

    @property
    def dimension_compatible(self):
        return Interval

    @property
    def name(self):
        return "Sum"

    def _process(self):
        # restart the running total if the source has been cleared meanwhile
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            self._aggregate = 0
            self._reader.rewind()
            changed = True
        else:
            changed = False
        # consume only the newly arrived records, incrementally
        for (x,) in self._reader:
            self._aggregate += float(x)
            changed = True
        if changed:
            # publish the new aggregate as a fresh single-row table
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)
+
class Min(Aggregator):
    '''
    @summary: aggregates a column into its minimum; requires at least
        ordinal scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # smallest value seen so far; None until the first record arrives
        self._aggregate = None

    @property
    def dimension_compatible(self):
        return Ordinal

    @property
    def name(self):
        return "Min"

    def _process(self):
        changed = False
        if self._aggregate is None:
            # consume exactly one record to seed the running minimum
            for (x,) in self._reader:
                changed = True
                self._aggregate = float(x)
                break
        sample = [self._aggregate]
        for (x,) in self._reader:
            sample.append( float(x) )
            changed = True
        if changed:
            self._aggregate = min(sample)
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)
+
class Max(Aggregator):
    '''
    @summary: aggregates a column into its maximum; requires at least
        ordinal scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # largest value seen so far; None until the first record arrives
        self._aggregate = None

    @property
    def dimension_compatible(self):
        return Ordinal

    @property
    def name(self):
        return "Max"

    def _process(self):
        changed = False
        if self._aggregate is None:
            # consume exactly one record to seed the running maximum
            for (x,) in self._reader:
                changed = True
                self._aggregate = float(x)
                break
        sample = [self._aggregate]
        for (x,) in self._reader:
            sample.append( float(x) )
            changed = True
        if changed:
            self._aggregate = max(sample)
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)
+
class Mean(Aggregator):
    '''
    @summary: aggregates a column into its arithmetic mean; requires ratio
        scale measurements
    '''
    def __init__(self, datasource, cellrequest):
        Aggregator.__init__(self, datasource, cellrequest)
        # running sum of all values consumed so far
        self._sum = 0
        self._aggregate = None

    @property
    def dimension_compatible(self):
        return Ratio

    @property
    def name(self):
        return "Mean"

    def _process(self):
        changed = False
        for (x,) in self._reader:
            self._sum += float(x)
            changed = True
        if changed:
            # NOTE(review): assumes len(self.source) equals the number of
            # records folded into _sum -- confirm against DataSource semantics
            self._aggregate = self._sum / float(len(self.source))
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)
+
class Deviation(Aggregator):
    '''
    @summary: aggregates a column into its standard deviation; requires
        ratio scale measurements.  The empirical flag selects the corrected
        (sample, n-1 denominator) estimator, otherwise the population
        (n denominator) estimator is used.
    '''
    def __init__(self, data, cellrequest):
        Aggregator.__init__(self, data, cellrequest)
        self._aggregate = None
        # True: Bessel-corrected sample standard deviation
        self._emp = True

    @property
    def empirical(self):
        return self._emp
    @empirical.setter
    def empirical(self, emp):
        self._emp = bool(emp)

    @property
    def dimension_compatible(self):
        return Ratio

    @property
    def name(self):
        return "StdDev"

    def _process(self):
        changed = False
        aggr = 0
        data = []
        self._reader.rewind()
        for (x,) in self._reader:
            x = float(x)
            aggr += x
            data.append(x)
            changed = True
        if changed:
            n = float(len(data))
            avg = aggr / n
            s2 = map(lambda x: (x-avg)*(x-avg), data)
            if self.empirical:
                # Bessel's correction divides by n-1 (the original code
                # divided by n+1, which is neither the sample nor the
                # population estimator); a single item has no defined
                # sample deviation, report 0 in that degenerate case
                self._aggregate = sqrt(sum(s2) / (n-1)) if n > 1 else 0.0
            else:
                self._aggregate = sqrt(sum(s2) / n)
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)
+
class Percentile(Aggregator):
    '''
    @summary: aggregates a column into a given percentile (default: 75%);
        requires at least ordinal scale measurements
    '''
    def __init__(self, data, cellrequest):
        self._aggregate = None
        # requested percentile as a fraction in [0, 1]
        self._percentile = .75
        Aggregator.__init__(self, data, cellrequest)

    @property
    def percentile(self):
        return self._percentile
    @percentile.setter
    def percentile(self, percentile):
        # clamp into the meaningful [0, 1] interval
        self._percentile = max(0, min(1, float(percentile)))

    @property
    def dimension_compatible(self):
        return Ordinal

    @property
    def name(self):
        return "Percentile_%d%%" % int(round(100 * self.percentile))

    def _process(self):
        data = []
        self._reader.rewind()
        for (x,) in self._reader:
            # convert like the other aggregators do, so that the sort is
            # numeric (not lexicographic) and the midpoint average below
            # is well defined
            data.append(float(x))
        if not data:
            # nothing to aggregate yet; the original indexed into the
            # empty list and raised IndexError here
            return
        data.sort()
        n = len(data)
        p = int((n - 1) * self.percentile)
        if n % 2:
            val = data[p]
        else:
            # clamp the neighbour index: p+1 would run off the end for
            # the 100th percentile of an even-sized sample
            val = .5 * (data[p] + data[min(p + 1, n - 1)])
        if self._aggregate != val:
            self._aggregate = val
            self._data.clear()
            self._record.update(name = self.cn_aggr, values = (self._aggregate,))
            self._record.update(name = self.cn_count, values = (len(self.source),))
            self._data.saveRecord(self._record)
diff --git a/Monitoring/src/main/python/DataProcessing/AggregatorManager$py.class b/Monitoring/src/main/python/DataProcessing/AggregatorManager$py.class
new file mode 100644
index 0000000..0db47be
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/AggregatorManager$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/AggregatorManager.py b/Monitoring/src/main/python/DataProcessing/AggregatorManager.py
new file mode 100644
index 0000000..7a2df8f
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/AggregatorManager.py
@@ -0,0 +1,40 @@
+from __future__ import with_statement
+'''
+Created on Dec 10, 2012
+
+@author: steger
+'''
+from DataProcessing.Aggregator import AggregatorError, Aggregator
+from DataProcessing.Sampler import Sampler
+
+class AggregatorManager(object):
+ def __init__(self):
+ self._id = 0;
+ self._aggregators = {}
+
+ def newAggregator(self, dataSource, cellrequest, commandflow):
+ for c, ca in commandflow:
+ if issubclass(c, Aggregator):
+ dataSource = c(dataSource, cellrequest)
+ for k, v in ca.iteritems():
+ dataSource.__setattr__(k, v)
+ elif issubclass(c, Sampler):
+ dataSource = c(dataSource)
+ for k, v in ca.iteritems():
+ dataSource.__setattr__(k, v)
+ self._id += 1
+ self._aggregators[ self._id ] = dataSource
+ return self._id
+
+ def __getitem__(self, aggregatorid):
+ try:
+ return self._aggregators[ aggregatorid ]
+ except:
+ raise AggregatorError("Aggregator with id %s not found" % aggregatorid)
+
+ def pop(self, aggregatorid):
+ try:
+ self._aggregators.pop( aggregatorid )
+ except KeyError:
+ print "WW: Aggregator with id %s not found" % aggregatorid
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/DataProcessing/AggregatorManager.py.old b/Monitoring/src/main/python/DataProcessing/AggregatorManager.py.old
new file mode 100644
index 0000000..786959c
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/AggregatorManager.py.old
@@ -0,0 +1,39 @@
+'''
+Created on Dec 10, 2012
+
+@author: steger
+'''
+from DataProcessing.Aggregator import AggregatorError, Aggregator
+from DataProcessing.Sampler import Sampler
+
+class AggregatorManager(object):
+ def __init__(self):
+ self._id = 0;
+ self._aggregators = {}
+
+ def newAggregator(self, dataSource, cellrequest, commandflow):
+ for c, ca in commandflow:
+ if issubclass(c, Aggregator):
+ dataSource = c(dataSource, cellrequest)
+ for k, v in ca.iteritems():
+ dataSource.__setattr__(k, v)
+ elif issubclass(c, Sampler):
+ dataSource = c(dataSource)
+ for k, v in ca.iteritems():
+ dataSource.__setattr__(k, v)
+ self._id += 1
+ self._aggregators[ self._id ] = dataSource
+ return self._id
+
+ def __getitem__(self, aggregatorid):
+ try:
+ return self._aggregators[ aggregatorid ]
+ except:
+ raise AggregatorError("Aggregator with id %s not found" % aggregatorid)
+
+ def pop(self, aggregatorid):
+ try:
+ self._aggregators.pop( aggregatorid )
+ except KeyError:
+ print "WW: Aggregator with id %s not found" % aggregatorid
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/DataProcessing/Data$py.class b/Monitoring/src/main/python/DataProcessing/Data$py.class
new file mode 100644
index 0000000..6be2fee
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Data$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/Data.py b/Monitoring/src/main/python/DataProcessing/Data.py
new file mode 100644
index 0000000..d6f6dbc
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Data.py
@@ -0,0 +1,296 @@
+from __future__ import with_statement
+'''
+Created on Sep 1, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+@author: laki, sandor
+'''
+
+from threading import Lock, RLock
+from DataProcessing.DataHeader import DataHeaderCell, DataHeader, DataError
+from DataProcessing.Unit import UnitManager
+from DataProcessing.DataSource import DataSource
+
+
+class Data(DataSource):
+    '''
+    @author: steger, jozsef
+    @summary:
+    This class contains the data in a tabular format.
+    All items in the same column are data of the same kind
+    whereas all data in the same record (row) are correlated.
+
+    Contents of cells of a given column are either single items or Data objects
+    as dictated by the header of the table.
+
+    New records can be added using the Record class, for which template generator is provided by this class.
+
+    DataReaders and other consumers can register Events at new data insertion or at clear.
+
+    NOTE(review): this is the Jython-compatible variant using explicit
+    property(getter) assignments instead of @property decorators.
+    '''
+
+    class Record(object):
+        '''
+        @author: steger, jozsef
+        @summary:
+        This class represents a given set of records that can be appended to a Data table.
+        It provides useful methods to manipulate data within the record.
+        '''
+        def __init__(self, unitmanager, dataheader, size = 1):
+            '''
+            @summary: Constructor
+            @param unitmanager: necessary to handle conversion
+            @type unitmanager: UnitManager
+            @param dataheader: the record conforms to the data header provided here
+            @type dataheader: DataHeader
+            @param size: the number of items to handle at once, default is 1
+            @type size: integer
+            @raise DataError: a header cell is neither a DataHeaderCell nor a
+            nested DataHeader
+            '''
+            self.um = unitmanager
+            self.record = {}
+            self.units = {}
+            self.subheaders = {}
+            self.subrecords = {}
+            self.size = size
+            # NOTE(review): aliases the header's private _cellnames list, no copy
+            self.names = dataheader._cellnames
+            for name, cell in dataheader._cells.iteritems():
+                if isinstance(cell, DataHeaderCell):
+                    # plain column: preallocate placeholders and remember its unit
+                    self.record[name] = [ None ] * self.size
+                    self.units[name] = cell.unit
+                elif isinstance(cell, DataHeader):
+                    # table valued column: containers are built later by getRecordTemplates()
+                    self.subheaders[name] = cell
+                else:
+                    raise DataError("Data header declaration is wrong")
+
+        def __str__(self):
+            return "<DataRecord %s, size: %d (%s; %s)>: " % (id(self), self.size, ','.join(self.record.keys()), ','.join(self.subheaders.keys()))
+
+        def clear(self, size = None):
+            '''
+            @summary: Clean the record containers and optionally resize the container
+            @note: if DataRecord container is resized, sub record pointers are invalidated
+            @param size: the requested new size of the container, default is None, which means keep the original size
+            @type size: integer
+            '''
+            if size is None:
+                for name in self.record.keys():
+                    self.record[name] = [ None ] * self.size
+                    # NOTE(review): subrecords are keyed by table-valued column
+                    # names, which are kept in subheaders rather than record --
+                    # TODO confirm this branch is reachable
+                    if self.subrecords.has_key(name):
+                        for r in self.subrecords[name]:
+                            r.clear()
+            else:
+                self.size = size
+                for name in self.record.keys():
+                    self.record[name] = [ None ] * self.size
+                self.subrecords.clear()
+
+        def getRecordTemplates(self, name, sizes = None):
+            '''
+            @summary: Sub record templates are pointing to table valued cells. This method allocates container to those data structures.
+            @param name: the column name, that point to table valued columns
+            @type name: string
+            @param sizes: a list of integers that indicate the sizes of each sub tables. Default is None, which means the allocation of single row containers
+            @type sizes: list/tuple of integers or None
+            @return: a list of Record containers with size items
+            @rtype: a list of Record
+            @raise DataError: column name not found / wrong record sizes
+            '''
+            if sizes == None:
+                sizes = [1] * self.size
+            if len(sizes) != self.size:
+                raise DataError("wrong record sizes requested")
+            if not self.subheaders.has_key(name):
+                raise DataError("Cannot find column name: %s" % name)
+            hdr = self.subheaders[name]
+            self.subrecords[name] = []
+            # consume sizes destructively; one Record per requested sub table
+            while len(sizes):
+                self.subrecords[name].append( Data.Record(unitmanager = self.um, dataheader = hdr, size = sizes.pop(0)) )
+            return self.subrecords[name]
+
+        def update(self, name, values, unit = None):
+            '''
+            @summary: Update the column with the new values and make sure the unit is converted to the current unit of the model
+            @param name: the name of the column
+            @type name: string
+            @param values: a list of data values to update the cells
+            @type values: list
+            @param unit: the unit of the values in the list, default is None, which means it is the same as the current unit stated in the unit model
+            @type unit: string or None
+            @raise DataError: missing column name / table valued cells / size mismatch
+            '''
+            if not self.record.has_key(name):
+                raise DataError("Record has no column named %s" % name)
+            if not self.units.has_key(name):
+                raise DataError("Cannot update column named %s (table valued cells)" % name)
+            if len(values) != self.size:
+                # NOTE(review): the format arguments look swapped relative to the
+                # message wording ("expected" receives len(values)) -- TODO confirm
+                raise DataError("The size of values don't match expected %d and got %d" % (len(values), self.size))
+            if unit is None:
+                # same unit as the model: store a shallow copy
+                self.record[name] = values[:]
+            elif isinstance(unit, UnitManager.Unit):
+                myunit = self.units[name]
+                if unit == myunit:
+                    self.record[name] = values[:]
+                else:
+                    # convert each quantity into the column's current unit
+                    self.record[name] = [ self.um.convert(value = quantity, from_unit = unit, to_unit = myunit) for quantity in values ]
+            else:
+                raise DataError("wrong type of unit")
+
+        def updateMany(self, names, values, units = None):
+            '''
+            @summary: Update more columns with a single call
+            @param names: a list of the non-table valued columns to update
+            @type names: list/tuple of string
+            @param values: a matrix of data values
+            @type values: list of list of value
+            @param units: a list of units corresponding to each columns, default is None, meaning everything is expected to be in the current unit
+            @type units: list/tuple of string or None
+            @raise DataError: size mismatch / unknown column name
+            '''
+            names = list(names)
+            if len(values) != self.size:
+                raise DataError("The size of values don't match %d" % self.size)
+            for name in names:
+                if not self.record.has_key(name):
+                    raise DataError("Record has no column named %s" % name)
+            # transpose the row-major input into per-column value lists
+            transpose = dict( map(lambda n: (n, []), names) )
+            s = len(names)
+            idxs = range(s)
+            while len(values):
+                value = values.pop(0)
+                if len(value) == s:
+                    for idx in idxs:
+                        transpose[names[idx]].append(value.pop(0))
+                else:
+                    raise DataError("Record size does not match")
+            if units is None:
+                units = [ None ] * s
+            else:
+                units = list(units)
+            # delegate to update() column by column, consuming names/units in step
+            while len(names):
+                name = names.pop(0)
+                unit = units.pop(0)
+                self.update(name = name, values = transpose[name], unit = unit)
+
+        def extract(self):
+            '''
+            @summary: Extract values stored in this record represented in a list in the order of names
+            @return: a list of values
+            @rtype: list
+            '''
+            retval = []
+            idx = 0
+            while idx < self.size:
+                rec = []
+                # follow header column order; table valued cells recurse
+                for name in self.names:
+                    if self.record.has_key(name):
+                        rec.append( self.record[name][idx] )
+                    elif self.subrecords.has_key(name):
+                        rec.append( self.subrecords[name][idx].extract() )
+                idx += 1
+                retval.append(tuple(rec))
+            return retval
+
+    def __init__(self, unitmanager, header):
+        '''
+        @summary: Constructor
+        @param unitmanager: necessary to handle conversion
+        @type unitmanager: UnitManager
+        @param header: the header declaration of the data table
+        @type header: DataHeader
+        @raise DataError: raised upon wrong table header is given
+        '''
+        if not isinstance(header, DataHeader):
+            raise DataError("attempt to allocate table with a wrong header")
+        self.um = unitmanager
+        DataSource.__init__(self, self)
+        self.header = header
+        self._rawrecords = []
+        # number of records appended by the most recent saveRecord() call
+        self._tail = 0
+        self._seq = 0
+        self._readlock = RLock()
+        self._writelock = Lock()
+        # the table is its own data source
+        self._data = self
+
+    def __str__(self):
+        '''
+        @summary: returns the name of the table and the python object id
+        @return: abbreviated representation of the table
+        @rtype: string
+        '''
+        return "<Data: %s %s>" % (self.header.name, id(self))
+
+    def __len__(self):
+        # number of stored raw records
+        return len(self._rawrecords)
+
+    def __getitem__(self, k):
+        # index/slice straight into the raw record list
+        return self._rawrecords.__getitem__(k)
+
+    def _get_data(self):
+        return self
+
+    def _get_readlock(self):
+        return self._readlock
+
+    def _get_writelock(self):
+        return self._writelock
+
+    def _get_name(self):
+        '''
+        @summary: the name of the data is defined by the header
+        @return: the name of the header
+        @rtype: string
+        '''
+        return self.header.name
+
+    def _get_tail(self):
+        '''
+        @summary: Tail property indicates how many new records have been saved to the table in the last call
+        @return: number of new records
+        @rtype: integer
+        '''
+        return self._tail
+
+    def getTemplate(self, size = 1):
+        '''
+        @summary: Generate a helper class to extend the table with new values
+        @param size: the size of the new records wished to handle together, default is 1
+        @type size: integer
+        @return: an empty row with the structure dictated by the header of the table
+        @rtype: Record
+        '''
+        return self.Record(unitmanager = self.um, dataheader = self.header.getHeader(self.header.name), size = size)
+
+    def saveRecord(self, record):
+        '''
+        @summary: append values stored in the record to the table
+        @param record: a record with new data values
+        @type record: DataRecord
+        '''
+        #TODO: check if record is not corrupted
+        newrecords = record.extract()
+        self._tail = len(newrecords)
+        with self.writelock:
+            self._rawrecords.extend( newrecords )
+        # notify readers registered on the expand event (inherited from DataSource)
+        self._onexpand()
+
+    def clear(self):
+        '''
+        @summary: delete all data records stored
+        '''
+        with self.writelock:
+            self._rawrecords = []
+            self._tail = 0
+            self._seq = 0
+        self._onclear()
+
+    # Jython / pre-decorator style read-only properties
+    readlock = property(_get_readlock,None,None)
+
+    tail = property(_get_tail,None,None)
+
+    data = property(_get_data,None,None)
+
+    name = property(_get_name,None,None)
+
+    writelock = property(_get_writelock,None,None)
diff --git a/Monitoring/src/main/python/DataProcessing/Data.py.old b/Monitoring/src/main/python/DataProcessing/Data.py.old
new file mode 100644
index 0000000..764ecb9
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Data.py.old
@@ -0,0 +1,290 @@
+'''
+Created on Sep 1, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+@author: laki, sandor
+'''
+
+from threading import Lock, RLock
+from DataProcessing.DataHeader import DataHeaderCell, DataHeader, DataError
+from DataProcessing.Unit import UnitManager
+from DataProcessing.DataSource import DataSource
+
+
+class Data(DataSource):
+    '''
+    @author: steger, jozsef
+    @summary:
+    This class contains the data in a tabular format.
+    All items in the same column are data of the same kind
+    whereas all data in the same record (row) are correlated.
+
+    Contents of cells of a given column are either single items or Data objects
+    as dictated by the header of the table.
+
+    New records can be added using the Record class, for which template generator is provided by this class.
+
+    DataReaders and other consumers can register Events at new data insertion or at clear.
+
+    NOTE(review): .py.old variant kept next to the Jython-converted Data.py;
+    it uses @property decorators instead of property(getter) assignments.
+    '''
+
+    class Record(object):
+        '''
+        @author: steger, jozsef
+        @summary:
+        This class represents a given set of records that can be appended to a Data table.
+        It provides useful methods to manipulate data within the record.
+        '''
+        def __init__(self, unitmanager, dataheader, size = 1):
+            '''
+            @summary: Constructor
+            @param unitmanager: necessary to handle conversion
+            @type unitmanager: UnitManager
+            @param dataheader: the record conforms to the data header provided here
+            @type dataheader: DataHeader
+            @param size: the number of items to handle at once, default is 1
+            @type size: integer
+            @raise DataError: a header cell is neither a DataHeaderCell nor a
+            nested DataHeader
+            '''
+            self.um = unitmanager
+            self.record = {}
+            self.units = {}
+            self.subheaders = {}
+            self.subrecords = {}
+            self.size = size
+            # NOTE(review): aliases the header's private _cellnames list, no copy
+            self.names = dataheader._cellnames
+            for name, cell in dataheader._cells.iteritems():
+                if isinstance(cell, DataHeaderCell):
+                    self.record[name] = [ None ] * self.size
+                    self.units[name] = cell.unit
+                elif isinstance(cell, DataHeader):
+                    self.subheaders[name] = cell
+                else:
+                    raise DataError("Data header declaration is wrong")
+
+        def __str__(self):
+            return "<DataRecord %s, size: %d (%s; %s)>: " % (id(self), self.size, ','.join(self.record.keys()), ','.join(self.subheaders.keys()))
+
+        def clear(self, size = None):
+            '''
+            @summary: Clean the record containers and optionally resize the container
+            @note: if DataRecord container is resized, sub record pointers are invalidated
+            @param size: the requested new size of the container, default is None, which means keep the original size
+            @type size: integer
+            '''
+            if size is None:
+                for name in self.record.keys():
+                    self.record[name] = [ None ] * self.size
+                    # NOTE(review): subrecords are keyed by table-valued column
+                    # names, not plain columns -- TODO confirm this is reachable
+                    if self.subrecords.has_key(name):
+                        for r in self.subrecords[name]:
+                            r.clear()
+            else:
+                self.size = size
+                for name in self.record.keys():
+                    self.record[name] = [ None ] * self.size
+                self.subrecords.clear()
+
+        def getRecordTemplates(self, name, sizes = None):
+            '''
+            @summary: Sub record templates are pointing to table valued cells. This method allocates container to those data structures.
+            @param name: the column name, that point to table valued columns
+            @type name: string
+            @param sizes: a list of integers that indicate the sizes of each sub tables. Default is None, which means the allocation of single row containers
+            @type sizes: list/tuple of integers or None
+            @return: a list of Record containers with size items
+            @rtype: a list of Record
+            @raise DataError: column name not found / wrong record sizes
+            '''
+            if sizes == None:
+                sizes = [1] * self.size
+            if len(sizes) != self.size:
+                raise DataError("wrong record sizes requested")
+            if not self.subheaders.has_key(name):
+                raise DataError("Cannot find column name: %s" % name)
+            hdr = self.subheaders[name]
+            self.subrecords[name] = []
+            while len(sizes):
+                self.subrecords[name].append( Data.Record(unitmanager = self.um, dataheader = hdr, size = sizes.pop(0)) )
+            return self.subrecords[name]
+
+        def update(self, name, values, unit = None):
+            '''
+            @summary: Update the column with the new values and make sure the unit is converted to the current unit of the model
+            @param name: the name of the column
+            @type name: string
+            @param values: a list of data values to update the cells
+            @type values: list
+            @param unit: the unit of the values in the list, default is None, which means it is the same as the current unit stated in the unit model
+            @type unit: string or None
+            @raise DataError: missing column name / table valued cells / size mismatch
+            '''
+            if not self.record.has_key(name):
+                raise DataError("Record has no column named %s" % name)
+            if not self.units.has_key(name):
+                raise DataError("Cannot update column named %s (table valued cells)" % name)
+            if len(values) != self.size:
+                # NOTE(review): format args appear swapped w.r.t. the wording
+                raise DataError("The size of values don't match expected %d and got %d" % (len(values), self.size))
+            if unit is None:
+                self.record[name] = values[:]
+            elif isinstance(unit, UnitManager.Unit):
+                myunit = self.units[name]
+                if unit == myunit:
+                    self.record[name] = values[:]
+                else:
+                    self.record[name] = [ self.um.convert(value = quantity, from_unit = unit, to_unit = myunit) for quantity in values ]
+            else:
+                raise DataError("wrong type of unit")
+
+        def updateMany(self, names, values, units = None):
+            '''
+            @summary: Update more columns with a single call
+            @param names: a list of the non-table valued columns to update
+            @type names: list/tuple of string
+            @param values: a matrix of data values
+            @type values: list of list of value
+            @param units: a list of units corresponding to each columns, default is None, meaning everything is expected to be in the current unit
+            @type units: list/tuple of string or None
+            @raise DataError: size mismatch / unknown column name
+            '''
+            names = list(names)
+            if len(values) != self.size:
+                raise DataError("The size of values don't match %d" % self.size)
+            for name in names:
+                if not self.record.has_key(name):
+                    raise DataError("Record has no column named %s" % name)
+            # transpose row-major input into per-column lists
+            transpose = dict( map(lambda n: (n, []), names) )
+            s = len(names)
+            idxs = range(s)
+            while len(values):
+                value = values.pop(0)
+                if len(value) == s:
+                    for idx in idxs:
+                        transpose[names[idx]].append(value.pop(0))
+                else:
+                    raise DataError("Record size does not match")
+            if units is None:
+                units = [ None ] * s
+            else:
+                units = list(units)
+            while len(names):
+                name = names.pop(0)
+                unit = units.pop(0)
+                self.update(name = name, values = transpose[name], unit = unit)
+
+        def extract(self):
+            '''
+            @summary: Extract values stored in this record represented in a list in the order of names
+            @return: a list of values
+            @rtype: list
+            '''
+            retval = []
+            idx = 0
+            while idx < self.size:
+                rec = []
+                for name in self.names:
+                    if self.record.has_key(name):
+                        rec.append( self.record[name][idx] )
+                    elif self.subrecords.has_key(name):
+                        rec.append( self.subrecords[name][idx].extract() )
+                idx += 1
+                retval.append(tuple(rec))
+            return retval
+
+    def __init__(self, unitmanager, header):
+        '''
+        @summary: Constructor
+        @param unitmanager: necessary to handle conversion
+        @type unitmanager: UnitManager
+        @param header: the header declaration of the data table
+        @type header: DataHeader
+        @raise DataError: raised upon wrong table header is given
+        '''
+        if not isinstance(header, DataHeader):
+            raise DataError("attempt to allocate table with a wrong header")
+        self.um = unitmanager
+        DataSource.__init__(self, self)
+        self.header = header
+        self._rawrecords = []
+        # number of records appended by the most recent saveRecord() call
+        self._tail = 0
+        self._seq = 0
+        self._readlock = RLock()
+        self._writelock = Lock()
+        self._data = self
+
+    def __str__(self):
+        '''
+        @summary: returns the name of the table and the python object id
+        @return: abbreviated representation of the table
+        @rtype: string
+        '''
+        return "<Data: %s %s>" % (self.header.name, id(self))
+
+    def __len__(self):
+        # number of stored raw records
+        return len(self._rawrecords)
+
+    def __getitem__(self, k):
+        return self._rawrecords.__getitem__(k)
+
+    @property
+    def data(self):
+        # the table is its own data source
+        return self
+
+    @property
+    def readlock(self):
+        return self._readlock
+
+    @property
+    def writelock(self):
+        return self._writelock
+
+    @property
+    def name(self):
+        '''
+        @summary: the name of the data is defined by the header
+        @return: the name of the header
+        @rtype: string
+        '''
+        return self.header.name
+
+    @property
+    def tail(self):
+        '''
+        @summary: Tail property indicates how many new records have been saved to the table in the last call
+        @return: number of new records
+        @rtype: integer
+        '''
+        return self._tail
+
+    def getTemplate(self, size = 1):
+        '''
+        @summary: Generate a helper class to extend the table with new values
+        @param size: the size of the new records wished to handle together, default is 1
+        @type size: integer
+        @return: an empty row with the structure dictated by the header of the table
+        @rtype: Record
+        '''
+        return self.Record(unitmanager = self.um, dataheader = self.header.getHeader(self.header.name), size = size)
+
+    def saveRecord(self, record):
+        '''
+        @summary: append values stored in the record to the table
+        @param record: a record with new data values
+        @type record: DataRecord
+        '''
+        #TODO: check if record is not corrupted
+        newrecords = record.extract()
+        self._tail = len(newrecords)
+        with self.writelock:
+            self._rawrecords.extend( newrecords )
+        self._onexpand()
+
+    def clear(self):
+        '''
+        @summary: delete all data records stored
+        '''
+        with self.writelock:
+            self._rawrecords = []
+            self._tail = 0
+            self._seq = 0
+        self._onclear()
+
+
diff --git a/Monitoring/src/main/python/DataProcessing/DataError$py.class b/Monitoring/src/main/python/DataProcessing/DataError$py.class
new file mode 100644
index 0000000..4fa0f3d
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataError$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/DataError.py b/Monitoring/src/main/python/DataProcessing/DataError.py
new file mode 100644
index 0000000..4933f85
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataError.py
@@ -0,0 +1,29 @@
+'''
+Created on Dec 20, 2012
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+class DataError(Exception):
+    '''Root of the data-processing exception hierarchy.'''
+    pass
+
+class PrefixError(DataError):
+    '''Raised for unit-prefix related failures.'''
+    pass
+
+class UnitError(DataError):
+    '''Raised for unit lookup/conversion failures.'''
+    pass
+
+class DimensionError(DataError):
+    '''Raised for dimension related failures.'''
+    pass
+
+class ParameterError(DataError):
+    '''Raised for invalid parameter declarations or values.'''
+    pass
+
+class SamplerError(DataError):
+    '''Raised by Sampler stages.'''
+    pass
+
+class AggregatorError(DataError):
+    '''Raised by Aggregator stages and the AggregatorManager.'''
+    pass
+
+
diff --git a/Monitoring/src/main/python/DataProcessing/DataError.py.old b/Monitoring/src/main/python/DataProcessing/DataError.py.old
new file mode 100644
index 0000000..4933f85
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataError.py.old
@@ -0,0 +1,29 @@
+'''
+Created on Dec 20, 2012
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+class DataError(Exception):
+    '''Root of the data-processing exception hierarchy.'''
+    pass
+
+class PrefixError(DataError):
+    '''Raised for unit-prefix related failures.'''
+    pass
+
+class UnitError(DataError):
+    '''Raised for unit lookup/conversion failures.'''
+    pass
+
+class DimensionError(DataError):
+    '''Raised for dimension related failures.'''
+    pass
+
+class ParameterError(DataError):
+    '''Raised for invalid parameter declarations or values.'''
+    pass
+
+class SamplerError(DataError):
+    '''Raised by Sampler stages.'''
+    pass
+
+class AggregatorError(DataError):
+    '''Raised by Aggregator stages and the AggregatorManager.'''
+    pass
+
+
diff --git a/Monitoring/src/main/python/DataProcessing/DataFormatter$py.class b/Monitoring/src/main/python/DataProcessing/DataFormatter$py.class
new file mode 100644
index 0000000..ec2ed67
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataFormatter$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/DataFormatter.py b/Monitoring/src/main/python/DataProcessing/DataFormatter.py
new file mode 100644
index 0000000..768b83b
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataFormatter.py
@@ -0,0 +1,145 @@
+'''
+Created on 08.08.2011
+
+@author: steger
+'''
+from DataProcessing.DataReader import DataReader
+from DataProcessing.Data import Data
+from DataProcessing.DataError import DataError
+
+class Formatter(DataReader):
+    '''
+    @summary: abstract base class for serializing a Data table; concrete
+    subclasses must implement _cell(), header() and serialize()
+    '''
+    def __init__(self, datasource):
+        '''
+        @summary: Constructor
+        @param datasource: the source to serialize; the reader attaches to its
+        underlying Data table (datasource._data)
+        '''
+        DataReader.__init__(self, datasource._data)
+
+    def _cell(self):
+        # abstract: serialize a single header cell
+        raise DataError("Implement _cell() method")
+
+    def header(self):
+        # abstract: serialize the full table header
+        raise DataError("Implement header() method")
+
+    def serialize(self):
+        # abstract: serialize the header plus freshly added records
+        raise DataError("Implement serialize() method")
+
+    def _get_name(self):
+        # delegate to the wrapped data source
+        return self.source.name
+
+    # Jython / pre-decorator style read-only property
+    name = property(_get_name,None,None)
+class JsonFormatter(Formatter):
+    '''
+    @summary:
+    Serialize Data in JSON format
+    '''
+
+    def _cell(self, c):
+        '''
+        @summary: serialize a column in JSON format
+        @param c: the header cell to serialize
+        @return: a JSON object string describing the cell
+        @rtype: string
+        '''
+        try:
+            feature = "\n \"FEATURE\": \"%s\"," % c.feature
+        except:
+            # NOTE(review): bare except -- cells without a feature attribute
+            # fall back to no FEATURE entry, but other errors are hidden too
+            feature = ""
+        # unit references look like "<prefix>_<base>"; count('_') tells
+        # whether a prefix is present
+        score = c.unit.reference.count('_')
+        if score == 0:
+            # NOTE(review): "DIMENTSION" is a typo baked into the emitted JSON
+            # key; consumers may depend on it, so it is left unchanged
+            ret = """{%s
+ "NAME" : "%s",
+ "DIMENTSION" : "%s",
+ "UNIT" : "%s"
+ }""" % (feature, c.name, c.dimension.name, c.unit.reference)
+        elif score == 1:
+            prefix, base = c.unit.reference.split('_')
+            ret = """{%s
+ "NAME" : "%s",
+ "DIMENTSION" : "%s",
+ "PREFIX" : "%s",
+ "UNIT" : "%s"
+ }""" % (feature, c.name, c.dimension.name, prefix, base)
+        else:
+            ret = "ERROR: %s" % c
+        return ret
+
+    def header(self):
+        '''
+        @summary: serialize full header
+        '''
+        return """{
+ "NAME" : "DataHeader %s",
+ "HDRINFO" : [
+ %s
+ ]
+ }""" % (id(self.source.header), ",\n ".join([ self._cell(c) for c in self.headercells() ]))
+
+    def serialize(self):
+        '''
+        @summary: serialize the header and the new lines of the table into JSON format
+        @return: formatted string representation of the table
+        @rtype: string
+        '''
+        # nothing new since the last read: emit nothing
+        if not self.sourceExpanded.isSet():
+            return ""
+        r = []
+        for rec in self:
+            st = []
+            for d in rec:
+                if isinstance(d, Data):
+                    #FIXME:
+                    st.append( d._dump() )
+                else:
+                    # map Python None to JSON null
+                    if d is None:
+                        tmpd = "null"
+                    else:
+                        tmpd = str(d)
+                    st.append( tmpd )
+            r.append("[ %s ]" % ", ".join(st))
+        return """{
+ "TYPE" : "%s",
+ "ID" : %d,
+ "HDR" : %s,
+ "DATA" : [
+ %s
+ ]
+}""" % (self.source.header.name, id(self), self.header(), ",\n ".join(r))
+
+class DumbFormatter(Formatter):
+    '''
+    @summary:
+    Serialize Data in a trivial format
+    '''
+
+    def _cell(self, c):
+        '''
+        @summary: serialize column
+        @param c: the header cell to serialize
+        @rtype: string
+        '''
+        try:
+            return "%s (%s/%s) [%s]" % (c.name, c.feature, c.dimension.name, c.unit)
+        except:
+            # NOTE(review): bare except -- assumed to cover cells without a
+            # feature attribute; other errors are hidden too
+            return "%s (/%s) [%s]" % (c.name, c.dimension.name, c.unit)
+
+    def header(self):
+        '''
+        @summary: serialize full header
+        '''
+        return "<DataHeader %s>: {%s: [%s]}" % (id(self.source.header), self.name, ", ".join([ self._cell(c) for c in self.headercells() ]))
+
+    def serialize(self):
+        '''
+        @summary: serialize the header and the new lines of the table
+        @return: formatted string representation of the table, if no new data are ready the empty string is returned
+        @rtype: string
+        '''
+        if not self.sourceExpanded.isSet():
+            return ""
+        r = []
+        for rec in self:
+            st = []
+            for d in rec:
+                if isinstance(d, Data):
+                    #FIXME:
+                    st.append( d._dump() )
+                else:
+                    st.append( str(d) )
+            r.append("(%s)" % ", ".join(st))
+        return "{%s:\nHDR:%s\n DATA:[\n%s\n]}" % (str(self), self.header(), ", \n".join(r))
diff --git a/Monitoring/src/main/python/DataProcessing/DataFormatter.py.old b/Monitoring/src/main/python/DataProcessing/DataFormatter.py.old
new file mode 100644
index 0000000..1cc88c9
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataFormatter.py.old
@@ -0,0 +1,140 @@
+'''
+Created on 08.08.2011
+
+@author: steger
+'''
+from DataProcessing.DataReader import DataReader
+from DataProcessing.Data import Data
+from DataProcessing.DataError import DataError
+
+class Formatter(DataReader):
+    '''
+    @summary: abstract base class for serializing a Data table; concrete
+    subclasses must implement _cell(), header() and serialize()
+    (.py.old variant using the @property decorator)
+    '''
+    def __init__(self, datasource):
+        '''
+        @summary: Constructor
+        @param datasource: the source to serialize; the reader attaches to its
+        underlying Data table (datasource._data)
+        '''
+        DataReader.__init__(self, datasource._data)
+
+    def _cell(self):
+        # abstract: serialize a single header cell
+        raise DataError("Implement _cell() method")
+
+    def header(self):
+        # abstract: serialize the full table header
+        raise DataError("Implement header() method")
+
+    def serialize(self):
+        # abstract: serialize the header plus freshly added records
+        raise DataError("Implement serialize() method")
+
+    @property
+    def name(self):
+        # delegate to the wrapped data source
+        return self.source.name
+
+class JsonFormatter(Formatter):
+    '''
+    @summary:
+    Serialize Data in JSON format
+    (.py.old variant: None values are stringified as "None", not JSON null)
+    '''
+
+    def _cell(self, c):
+        '''
+        @summary: serialize a column in JSON format
+        @param c: the header cell to serialize
+        @rtype: string
+        '''
+        try:
+            feature = "\n \"FEATURE\": \"%s\"," % c.feature
+        except:
+            # NOTE(review): bare except hides more than a missing feature attribute
+            feature = ""
+        # unit references look like "<prefix>_<base>"
+        score = c.unit.reference.count('_')
+        if score == 0:
+            # NOTE(review): "DIMENTSION" is a typo baked into the emitted JSON key
+            ret = """{%s
+ "NAME" : "%s",
+ "DIMENTSION" : "%s",
+ "UNIT" : "%s"
+ }""" % (feature, c.name, c.dimension.name, c.unit.reference)
+        elif score == 1:
+            prefix, base = c.unit.reference.split('_')
+            ret = """{%s
+ "NAME" : "%s",
+ "DIMENTSION" : "%s",
+ "PREFIX" : "%s",
+ "UNIT" : "%s"
+ }""" % (feature, c.name, c.dimension.name, prefix, base)
+        else:
+            ret = "ERROR: %s" % c
+        return ret
+
+    def header(self):
+        '''
+        @summary: serialize full header
+        '''
+        return """{
+ "NAME" : "DataHeader %s",
+ "HDRINFO" : [
+ %s
+ ]
+ }""" % (id(self.source.header), ",\n ".join([ self._cell(c) for c in self.headercells() ]))
+
+    def serialize(self):
+        '''
+        @summary: serialize the header and the new lines of the table into JSON format
+        @return: formatted string representation of the table
+        @rtype: string
+        '''
+        if not self.sourceExpanded.isSet():
+            return ""
+        r = []
+        for rec in self:
+            st = []
+            for d in rec:
+                if isinstance(d, Data):
+                    #FIXME:
+                    st.append( d._dump() )
+                else:
+                    st.append( str(d) )
+            r.append("[ %s ]" % ", ".join(st))
+        return """{
+ "TYPE" : "%s",
+ "ID" : %d,
+ "HDR" : %s,
+ "DATA" : [
+ %s
+ ]
+}""" % (self.source.header.name, id(self), self.header(), ",\n ".join(r))
+
+class DumbFormatter(Formatter):
+    '''
+    @summary:
+    Serialize Data in a trivial format
+    '''
+
+    def _cell(self, c):
+        '''
+        @summary: serialize column
+        @param c: the header cell to serialize
+        @rtype: string
+        '''
+        try:
+            return "%s (%s/%s) [%s]" % (c.name, c.feature, c.dimension.name, c.unit)
+        except:
+            # NOTE(review): bare except -- assumed to cover cells without a
+            # feature attribute; other errors are hidden too
+            return "%s (/%s) [%s]" % (c.name, c.dimension.name, c.unit)
+
+    def header(self):
+        '''
+        @summary: serialize full header
+        '''
+        return "<DataHeader %s>: {%s: [%s]}" % (id(self.source.header), self.name, ", ".join([ self._cell(c) for c in self.headercells() ]))
+
+    def serialize(self):
+        '''
+        @summary: serialize the header and the new lines of the table
+        @return: formatted string representation of the table, if no new data are ready the empty string is returned
+        @rtype: string
+        '''
+        if not self.sourceExpanded.isSet():
+            return ""
+        r = []
+        for rec in self:
+            st = []
+            for d in rec:
+                if isinstance(d, Data):
+                    #FIXME:
+                    st.append( d._dump() )
+                else:
+                    st.append( str(d) )
+            r.append("(%s)" % ", ".join(st))
+        return "{%s:\nHDR:%s\n DATA:[\n%s\n]}" % (str(self), self.header(), ", \n".join(r))
diff --git a/Monitoring/src/main/python/DataProcessing/DataHeader$py.class b/Monitoring/src/main/python/DataProcessing/DataHeader$py.class
new file mode 100644
index 0000000..032fa9a
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataHeader$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/DataHeader.py b/Monitoring/src/main/python/DataProcessing/DataHeader.py
new file mode 100644
index 0000000..13dd2ea
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataHeader.py
@@ -0,0 +1,202 @@
+from __future__ import with_statement
+'''
+Created on Sep 1, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+@author: laki, sandor
+'''
+
+from DataProcessing.Dimension import DimensionManager
+from DataProcessing.DataError import DataError
+from DataProcessing.DataHeaderCell import DataHeaderCell, CellRequestByName,\
+ CellRequestByFeature
+
+class DataHeader(object):
+ '''
+ @author: steger, jozsef
+ @summary:
+ This class represents the full header of a table.
+ One can construct the header as a single step,
+ if they provide a header description or they can use
+ methods to add new columns.
+
+ In order to be able to represent a wide variety of data and relationship
+ between them, a column can refer to another table.
+ In that latter case a specific column refer to another DataHeader.
+ '''
+
+ def __init__(self, name):
+ '''
+ @summary: Constructor
+ @param name: the name of the table
+ @type name: string
+ @raise DataError: corrupt header description
+ '''
+ self._name = name
+ self._cellnames = []
+ self._cells = {}
+
+ def __iter__(self):
+ for cn in self._cellnames:
+ yield self._cells[cn]
+
+ def __len__(self):
+ '''
+ @summary: Return the number of columns
+ @return: the number of columns currently set
+ @rtype: integer
+ '''
+ return len(self._cellnames)
+
+ def __eq__(self, header):
+ '''
+ @summary: Comparison operator of table headers.
+ Two tables are declared equal, if all the columns' names and their unit models are the same.
+ Two headers are still regarded equal if the order of their columns are different
+ or the current unit of the corresponding columns are not the same.
+ @raise DataError: if not DataHeader instances are compared
+ @return: True if both the header name and all columns match, their order may vary
+ @rtype: boolean
+ '''
+ if not isinstance(header, DataHeader):
+ raise DataError("wrong type to compare")
+ if self.name != header.name:
+ return False
+ if len(self._cellnames) != len(header._cellnames):
+ return False
+ if self._cells.keys() != header._cells.keys():
+ return False
+ for n in self._cellnames:
+ if self._cells[n] != header._cells[n]:
+ return False
+ return True
+
+ def __ne__(self, header):
+ '''
+ @summary: comparison operator of table headers.
+ @return: True if tables headers differ
+ @rtype: boolean
+ '''
+ return not self.__eq__(header)
+
+ def _get_name(self):
+ return self._name
+
+ def has_name(self, name):
+ '''
+ @summary: Check for the existence of a given column name
+ @param name: the name of the column looking for
+ @type name: string
+ @return: true if such a name exists
+ @rtype: boolean
+ '''
+ return name in self._cellnames
+
+ def addColumn(self, cell):
+ '''
+ @summary: Append a new column at the end of the current table header structure
+ @param cell: pointer to the header of the new column
+ @type cell: DataHeader or DataHeaderCell
+ @raise DataError: cell is of a wrong type
+ '''
+ ishdr = isinstance(cell, DataHeader)
+ if ishdr or isinstance(cell, DataHeaderCell):
+ name = cell.name
+ if self.has_name(name):
+ raise DataError("attempt to add a column with an already existing name (%s)" % cell.name)
+ self._cells[name] = cell
+ self._cellnames.append(name)
+ else:
+ raise DataError("attempt to add a wrong type of header cell")
+
+ def removeColumn(self, name):
+ '''
+ @summary: remove a named column if it exists in the header. Otherwise do silently nothing
+ @param name: the name of the column to remove
+ @type name: string
+ '''
+ if self.has_name(name):
+ self._cells.pop(name)
+ self._cellnames.pop(self._cellnames.index(name))
+
+ def getHeader(self, name):
+ '''
+ @summary: Return a pointer to the named sub header in the naming hierarchy
+ @param name: the name of the sub header searched
+ @type name: string
+ @raise DataError: name not found
+ '''
+ if name.count('.') == 0:
+ if name == self.name:
+ return self
+ if self.has_name(name) and isinstance(self._cells[name], DataHeader):
+ return self._cells[name]
+ elif name.count('.') == 1:
+ n_pre, n_post = name.split('.', 1)
+ if n_pre == self.name and self.has_name(n_post) and isinstance(self._cells[n_post], DataHeader):
+ return self._cells[n_post]
+ else:
+ n_pre, n, n_post = name.split('.', 2)
+ if n_pre == self.name and self.has_name(n) and isinstance(self._cells[n], DataHeader):
+ return self._cells[n].getHeader(n_post)
+ raise DataError("Lost in the naming hierarchy: %s < %s" % (self.name, name))
+
+
+ name = property(_get_name,None,None)
+#FIXME: complex table lookup is not implemented
+ def getCell(self, cellrequest):
+ '''
+ @summary: Return the index and the cell referenced by a name
+ @param cellrequest:
+ @type name: CellRequest
+ @return: index and the cell
+ @rtype: (int, Cell)
+ @raise DataError: name not found
+ '''
+ if isinstance(cellrequest, CellRequestByName):
+ name = cellrequest.name
+ try:
+ yield (self._cellnames.index(name), self._cells[name])
+ except:
+ DataError("Cell with name %s not found" % name)
+ elif isinstance(cellrequest, CellRequestByFeature):
+ for c in self:
+ try:
+ if cellrequest == c:
+ yield (self._cellnames.index(c.name), c)
+ except DataError:
+ continue
+ else:
+ raise DataError("wrong request type")
+
+
+
+class DataHeaderGeneratedByDescription(DataHeader):
+    '''
+    @summary: DataHeader constructed from a declarative description: a list of
+    (name, description) or (name, description, unit) tuples, where description
+    is None or a Dimension for a plain column, or a nested description list for
+    a table valued column.
+    '''
+    def __init__(self, name, headerdescription):
+        '''
+        @summary: Constructor
+        @param name: the name of the table
+        @type name: string
+        @param headerdescription: the description of a full table header
+        @param headerdescription: list or None
+        @raise DataError: corrupt header description
+        '''
+        DataHeader.__init__(self, name)
+        for item in headerdescription:
+            # 2-tuples omit the unit; 3-tuples carry an explicit one
+            # NOTE(review): the loop variable shadows the table name parameter
+            if len(item) == 2:
+                name, description = item
+                unit = None
+            else:
+                name, description, unit = item
+            if self.has_name(name):
+                raise DataError("Duplicate column name declaration (%s)" % name)
+            if description is None or isinstance(description, DimensionManager.Dimension):
+                # plain column
+                cell = DataHeaderCell(name = name, dimension = description, unit = unit)
+                self.addColumn(cell)
+            elif isinstance(description, list):
+                # nested description: build a sub header recursively
+                hdr = DataHeaderGeneratedByDescription(name = name, headerdescription = description)
+                self.addColumn(hdr)
+            else:
+                raise DataError("corrupt header description (%s)" % name)
diff --git a/Monitoring/src/main/python/DataProcessing/DataHeader.py.old b/Monitoring/src/main/python/DataProcessing/DataHeader.py.old
new file mode 100644
index 0000000..722094e
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataHeader.py.old
@@ -0,0 +1,200 @@
+'''
+Created on Sep 1, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+@author: laki, sandor
+'''
+
+from DataProcessing.Dimension import DimensionManager
+from DataProcessing.DataError import DataError
+from DataProcessing.DataHeaderCell import DataHeaderCell, CellRequestByName,\
+ CellRequestByFeature
+
class DataHeader(object):
    '''
    @author: steger, jozsef
    @summary:
    This class represents the full header of a table.
    One can construct the header as a single step,
    if they provide a header description or they can use
    methods to add new columns.

    In order to be able to represent a wide variety of data and relationship
    between them, a column can refer to another table.
    In that latter case a specific column refer to another DataHeader.
    '''

    def __init__(self, name):
        '''
        @summary: Constructor
        @param name: the name of the table
        @type name: string
        '''
        self._name = name
        self._cellnames = []    # keeps the column order
        self._cells = {}        # column name -> DataHeaderCell or DataHeader

    def __iter__(self):
        # yield the cells in column order
        for cn in self._cellnames:
            yield self._cells[cn]

    def __len__(self):
        '''
        @summary: Return the number of columns
        @return: the number of columns currently set
        @rtype: integer
        '''
        return len(self._cellnames)

    def __eq__(self, header):
        '''
        @summary: Comparison operator of table headers.
        Two tables are declared equal, if all the columns' names and their unit models are the same.
        Two headers are still regarded equal if the order of their columns are different
        or the current unit of the corresponding columns are not the same.
        @raise DataError: if not DataHeader instances are compared
        @return: True if both the header name and all columns match, their order may vary
        @rtype: boolean
        '''
        if not isinstance(header, DataHeader):
            raise DataError("wrong type to compare")
        if self.name != header.name:
            return False
        if len(self._cellnames) != len(header._cellnames):
            return False
        # FIX: compare the key *sets*; the original compared .keys() lists,
        # which is order sensitive on Python 2 and contradicts the documented
        # "order may vary" contract
        if set(self._cells.keys()) != set(header._cells.keys()):
            return False
        for n in self._cellnames:
            if self._cells[n] != header._cells[n]:
                return False
        return True

    def __ne__(self, header):
        '''
        @summary: comparison operator of table headers.
        @return: True if tables headers differ
        @rtype: boolean
        '''
        return not self.__eq__(header)

    @property
    def name(self):
        return self._name

    def has_name(self, name):
        '''
        @summary: Check for the existence of a given column name
        @param name: the name of the column looking for
        @type name: string
        @return: true if such a name exists
        @rtype: boolean
        '''
        return name in self._cellnames

    def addColumn(self, cell):
        '''
        @summary: Append a new column at the end of the current table header structure
        @param cell: pointer to the header of the new column
        @type cell: DataHeader or DataHeaderCell
        @raise DataError: cell is of a wrong type or its name already exists
        '''
        ishdr = isinstance(cell, DataHeader)
        if ishdr or isinstance(cell, DataHeaderCell):
            name = cell.name
            if self.has_name(name):
                raise DataError("attempt to add a column with an already existing name (%s)" % cell.name)
            self._cells[name] = cell
            self._cellnames.append(name)
        else:
            raise DataError("attempt to add a wrong type of header cell")

    def removeColumn(self, name):
        '''
        @summary: remove a named column if it exists in the header. Otherwise do silently nothing
        @param name: the name of the column to remove
        @type name: string
        '''
        if self.has_name(name):
            self._cells.pop(name)
            self._cellnames.pop(self._cellnames.index(name))

    def getHeader(self, name):
        '''
        @summary: Return a pointer to the named sub header in the naming hierarchy
        @param name: the name of the sub header searched
        @type name: string
        @raise DataError: name not found
        '''
        # period-separated names walk down the table hierarchy
        if name.count('.') == 0:
            if name == self.name:
                return self
            if self.has_name(name) and isinstance(self._cells[name], DataHeader):
                return self._cells[name]
        elif name.count('.') == 1:
            n_pre, n_post = name.split('.', 1)
            if n_pre == self.name and self.has_name(n_post) and isinstance(self._cells[n_post], DataHeader):
                return self._cells[n_post]
        else:
            n_pre, n, n_post = name.split('.', 2)
            if n_pre == self.name and self.has_name(n) and isinstance(self._cells[n], DataHeader):
                return self._cells[n].getHeader(n_post)
        raise DataError("Lost in the naming hierarchy: %s < %s" % (self.name, name))

#FIXME: complex table lookup is not implemented
    def getCell(self, cellrequest):
        '''
        @summary: Generator of the (index, cell) pairs matching a cell request.
        @param cellrequest: column selector, matching either by name or by feature
        @type cellrequest: CellRequest
        @return: index and the cell
        @rtype: (int, Cell)
        @raise DataError: name not found, or the request is of a wrong type
        '''
        if isinstance(cellrequest, CellRequestByName):
            name = cellrequest.name
            try:
                yield (self._cellnames.index(name), self._cells[name])
            except (ValueError, KeyError):
                # FIX: the original built the DataError without raising it,
                # silently swallowing the failed lookup
                raise DataError("Cell with name %s not found" % name)
        elif isinstance(cellrequest, CellRequestByFeature):
            for c in self:
                try:
                    if cellrequest == c:
                        yield (self._cellnames.index(c.name), c)
                except DataError:
                    # cells without a feature raise DataError on access; skip them
                    continue
        else:
            raise DataError("wrong request type")
+
+
+
class DataHeaderGeneratedByDescription(DataHeader):
    def __init__(self, name, headerdescription):
        '''
        @summary: Constructor
        @param name: the name of the table
        @type name: string
        @param headerdescription: the description of a full table header
        @type headerdescription: list or None
        @raise DataError: corrupt header description
        '''
        DataHeader.__init__(self, name)
        for item in headerdescription:
            # a 2-tuple omits the unit, a 3-tuple spells it out
            if len(item) == 2:
                name, description = item
                unit = None
            else:
                name, description, unit = item
            if self.has_name(name):
                raise DataError("Duplicate column name declaration (%s)" % name)
            # a Dimension (or None) describes a plain data column
            if description is None or isinstance(description, DimensionManager.Dimension):
                cell = DataHeaderCell(name = name, dimension = description, unit = unit)
                self.addColumn(cell)
            # a nested description list declares a column pointing to a sub table
            elif isinstance(description, list):
                hdr = DataHeaderGeneratedByDescription(name = name, headerdescription = description)
                self.addColumn(hdr)
            else:
                raise DataError("corrupt header description (%s)" % name)
diff --git a/Monitoring/src/main/python/DataProcessing/DataHeaderCell$py.class b/Monitoring/src/main/python/DataProcessing/DataHeaderCell$py.class
new file mode 100644
index 0000000..1759ed4
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataHeaderCell$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/DataHeaderCell.py b/Monitoring/src/main/python/DataProcessing/DataHeaderCell.py
new file mode 100644
index 0000000..4d6ea95
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataHeaderCell.py
@@ -0,0 +1,155 @@
+'''
+Created on Dec 20, 2012
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+from DataProcessing.Dimension import DimensionManager
+from DataProcessing.DataError import DataError
+from DataProcessing.Unit import UnitManager
+
class Cell(object):
    '''
    @author: steger, jozsef
    @summary:
    This class represents meta information of a single table column.
    It stores the following information:
     - the name of the cell,
     - the feature associated to the underlying data,
     - the dimension of the underlying data,
     - the unit of the underlying data,
    @note: accessors are bound via property() calls at the bottom of the
    class body (Jython 2.2 compatible style instead of decorators)
    '''
    def __init__(self):
        # backing attributes of the name/dimension/unit/feature properties
        self._name = None
        self._dimension = None
        self._unit = None
        self._feature = None

    def _get_name(self):
        # raises DataError while the name property is unset
        if self._name is None:
            raise DataError("name property is not set")
        return self._name
    def _set_name(self, name):
        # name is write-once: re-assignment is only allowed with the same value
        if not isinstance(name, basestring):
            raise DataError("name is not a string")
        if name.count('.'):
            # periods are reserved as separators of the table naming hierarchy
            raise DataError("name must not contain any periods (%s)" % name)
        if self._name is not None and self._name != name:
            raise DataError("name property cannot be modified")
        self._name = name

    def _get_dimension(self):
        if not self._dimension:
            raise DataError("dimension property is not set")
        return self._dimension
    def _set_dimension(self, dimension):
        # the dimension must be consistent with an already chosen unit
        if not isinstance(dimension, DimensionManager.Dimension):
            raise DataError("dimension is invalid")
        if self._unit is not None:
            if not dimension.containsUnit(self._unit):
                raise DataError("unit %s is not in the basin of dimension %s" % (self.unit, dimension))
        self._dimension = dimension

    def _get_unit(self):
        # falls back to the default unit of the dimension when no unit is set
        if self._unit is None:
            return self.dimension.unit
        else:
            return self._unit
    def _set_unit(self, unit):
        # the unit must belong to the basin of an already chosen dimension
        if not isinstance(unit, UnitManager.Unit):
            raise DataError("unit is invalid")
        if self._dimension is not None:
            if not self.dimension.containsUnit(unit):
                raise DataError("unit %s is not in the basin of dimension %s" % (unit, self.dimension))
        self._unit = unit

    def _get_feature(self):
        if self._feature is None:
            raise DataError("feature property is not set")
        return self._feature
    def _set_feature(self, feature):
        # feature is write-once, like name
        if self._feature is not None and self._feature != feature:
            raise DataError("feature property cannot be modified")
        self._feature = feature

    def __eq__(self, cell):
        '''
        @summary: two cells are equal when all four backing attributes match
        @raise DataError: if the operand is not a Cell
        '''
        if not isinstance(cell, Cell):
            raise DataError("type error expecting Cell for comparison")
        return self._name == cell._name and self._feature == cell._feature and self._unit == cell._unit and self._dimension == cell._dimension

    def __ne__(self, cell):
        '''
        @summary: comparison operator of column types.
        @return: True if column names or their units differ
        @rtype: boolean
        '''
        return not self.__eq__(cell)



    feature = property(_get_feature,_set_feature,None)

    name = property(_get_name,_set_name,None)

    unit = property(_get_unit,_set_unit,None)

    dimension = property(_get_dimension,_set_dimension,None)
class DataHeaderCell(Cell):
    '''
    @summary: Concrete header cell: name and dimension are bound at
    construction time, unit and feature are optional extras.
    '''
    def __init__(self, name, dimension, feature = None, unit = None):
        Cell.__init__(self)
        self.name = name
        self.dimension = dimension
        # optional attributes are assigned only when provided, in the
        # original order: unit first, then feature
        for attrname, value in (('unit', unit), ('feature', feature)):
            if value is not None:
                setattr(self, attrname, value)
+
# Marker base class for column lookup requests (matching by name or by feature).
class CellRequest(Cell): pass
+
class CellRequestByName(CellRequest):
    '''
    @author: steger, jozsef
    @summary:
    This class represents the user request for a data column matching the name of the column.
    One can specify the requested unit.
    '''
    def __init__(self, name, unit = None):
        '''
        @summary: Constructor
        @param name: the name of the requested column
        @type name: string
        @param unit: the requested unit, default is None, which means no conversion request
        @type unit: string or None
        '''
        Cell.__init__(self)
        self.name = name
        if unit is None:
            return
        self.unit = unit

    def __eq__(self, cell):
        # a request matches any cell carrying the same column name
        return cell.name == self.name
+
class CellRequestByFeature(CellRequest):
    '''
    @author: steger, jozsef
    @summary:
    This class represents the user request for a data column(s) matching the feature of the column.
    One can specify the requested unit.
    '''
    def __init__(self, feature, unit = None):
        '''
        @summary: Constructor
        @param feature: the feature of the requested column
        @type feature: string
        @param unit: the requested unit, default is None, which means no conversion request
        @type unit: string or None
        '''
        Cell.__init__(self)
        self.feature = feature
        if unit is None:
            return
        self.unit = unit

    def __eq__(self, cell):
        # a request matches every cell exposing the same feature
        return cell.feature == self.feature
diff --git a/Monitoring/src/main/python/DataProcessing/DataHeaderCell.py.old b/Monitoring/src/main/python/DataProcessing/DataHeaderCell.py.old
new file mode 100644
index 0000000..11de1a9
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataHeaderCell.py.old
@@ -0,0 +1,155 @@
+'''
+Created on Dec 20, 2012
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+from DataProcessing.Dimension import DimensionManager
+from DataProcessing.DataError import DataError
+from DataProcessing.Unit import UnitManager
+
class Cell(object):
    '''
    @author: steger, jozsef
    @summary:
    This class represents meta information of a single table column.
    It stores the following information:
     - the name of the cell,
     - the feature associated to the underlying data,
     - the dimension of the underlying data,
     - the unit of the underlying data,
    '''
    def __init__(self):
        # backing attributes of the name/dimension/unit/feature properties
        self._name = None
        self._dimension = None
        self._unit = None
        self._feature = None

    @property
    def name(self):
        # raises DataError while the name property is unset
        if self._name is None:
            raise DataError("name property is not set")
        return self._name
    @name.setter
    def name(self, name):
        # name is write-once: re-assignment is only allowed with the same value
        if not isinstance(name, basestring):
            raise DataError("name is not a string")
        if name.count('.'):
            # periods are reserved as separators of the table naming hierarchy
            raise DataError("name must not contain any periods (%s)" % name)
        if self._name is not None and self._name != name:
            raise DataError("name property cannot be modified")
        self._name = name

    @property
    def dimension(self):
        if not self._dimension:
            raise DataError("dimension property is not set")
        return self._dimension
    @dimension.setter
    def dimension(self, dimension):
        # the dimension must be consistent with an already chosen unit
        if not isinstance(dimension, DimensionManager.Dimension):
            raise DataError("dimension is invalid")
        if self._unit is not None:
            if not dimension.containsUnit(self._unit):
                raise DataError("unit %s is not in the basin of dimension %s" % (self.unit, dimension))
        self._dimension = dimension

    @property
    def unit(self):
        # falls back to the default unit of the dimension when no unit is set
        if self._unit is None:
            return self.dimension.unit
        else:
            return self._unit
    @unit.setter
    def unit(self, unit):
        # the unit must belong to the basin of an already chosen dimension
        if not isinstance(unit, UnitManager.Unit):
            raise DataError("unit is invalid")
        if self._dimension is not None:
            if not self.dimension.containsUnit(unit):
                raise DataError("unit %s is not in the basin of dimension %s" % (unit, self.dimension))
        self._unit = unit

    @property
    def feature(self):
        if self._feature is None:
            raise DataError("feature property is not set")
        return self._feature
    @feature.setter
    def feature(self, feature):
        # feature is write-once, like name
        if self._feature is not None and self._feature != feature:
            raise DataError("feature property cannot be modified")
        self._feature = feature

    def __eq__(self, cell):
        '''
        @summary: two cells are equal when all four backing attributes match
        @raise DataError: if the operand is not a Cell
        '''
        if not isinstance(cell, Cell):
            raise DataError("type error expecting Cell for comparison")
        return self._name == cell._name and self._feature == cell._feature and self._unit == cell._unit and self._dimension == cell._dimension

    def __ne__(self, cell):
        '''
        @summary: comparison operator of column types.
        @return: True if column names or their units differ
        @rtype: boolean
        '''
        return not self.__eq__(cell)
+
+
class DataHeaderCell(Cell):
    '''
    @summary: Concrete header cell: name and dimension are bound at
    construction time, unit and feature are optional extras.
    '''
    def __init__(self, name, dimension, feature = None, unit = None):
        Cell.__init__(self)
        self.name = name
        self.dimension = dimension
        # optional attributes are only assigned when provided
        if unit is not None:
            self.unit = unit
        if feature is not None:
            self.feature = feature
+
# Marker base class for column lookup requests (matching by name or by feature).
class CellRequest(Cell): pass
+
class CellRequestByName(CellRequest):
    '''
    @author: steger, jozsef
    @summary:
    This class represents the user request for a data column matching the name of the column.
    One can specify the requested unit.
    '''
    def __init__(self, name, unit = None):
        '''
        @summary: Constructor
        @param name: the name of the requested column
        @type name: string
        @param unit: the requested unit, default is None, which means no conversion request
        @type unit: string or None
        '''
        Cell.__init__(self)
        self.name = name
        if unit is not None:
            self.unit = unit

    def __eq__(self, cell):
        # a request matches any cell carrying the same column name
        return self.name == cell.name
+
class CellRequestByFeature(CellRequest):
    '''
    @author: steger, jozsef
    @summary:
    This class represents the user request for a data column(s) matching the feature of the column.
    One can specify the requested unit.
    '''
    def __init__(self, feature, unit = None):
        '''
        @summary: Constructor
        @param feature: the feature of the requested column
        @type feature: string
        @param unit: the requested unit, default is None, which means no conversion request
        @type unit: string or None
        '''
        Cell.__init__(self)
        self.feature = feature
        if unit is not None:
            self.unit = unit

    def __eq__(self, cell):
        # a request matches every cell exposing the same feature
        return self.feature == cell.feature
diff --git a/Monitoring/src/main/python/DataProcessing/DataIndex.py b/Monitoring/src/main/python/DataProcessing/DataIndex.py
new file mode 100644
index 0000000..84d8390
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataIndex.py
@@ -0,0 +1,39 @@
+'''
+Created on Dec 31, 2012
+
+@author: steger
+'''
+from DataProcessing.DataReader import DataReader
+
class DataIndex(DataReader):
    '''
    @summary: A DataReader that maintains a lookup table mapping key column
    values to raw record indices, giving dict-like access to the rows of the
    underlying DataSource.
    '''

    def __init__(self, datasource, key):
        '''
        @summary: Constructor
        @param datasource: the table to index
        @type datasource: DataSource
        @param key: cell request(s) selecting the key column(s)
        '''
        DataReader.__init__(self, datasource)
        self.indexmap = {}
        self.extract(cellrequest = key)

    def buildindex(self):
        # continue numbering where the previous build stopped: iterating
        # self yields only the records not consumed so far
        i = len(self.indexmap)
        for k in self:
            self.indexmap[tuple(k)] = i
            i += 1

    def __getitem__(self, k):
        # a cleared source invalidates the whole index
        if self.sourceCleared.isSet():
            self.sourceCleared.clear()
            self.indexmap.clear()
            self.buildindex()
        try:
            iter(k)
        except TypeError:
            # scalar keys are normalized to 1-tuples to match buildindex
            k = (k,)
        # FIX: dict.has_key is Python 2 only (removed in Python 3); the
        # 'in' operator is equivalent on both. On a miss, lazily index any
        # freshly appended records before the final lookup.
        if k not in self.indexmap and self.sourceExpanded.isSet():
            self.sourceExpanded.clear()
            self.buildindex()
        return self.source._rawrecords[ self.indexmap[k] ]
diff --git a/Monitoring/src/main/python/DataProcessing/DataIndex.py.old b/Monitoring/src/main/python/DataProcessing/DataIndex.py.old
new file mode 100644
index 0000000..84d8390
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataIndex.py.old
@@ -0,0 +1,39 @@
+'''
+Created on Dec 31, 2012
+
+@author: steger
+'''
+from DataProcessing.DataReader import DataReader
+
class DataIndex(DataReader):
    '''
    @summary: A DataReader that maintains a lookup table mapping key column
    values to raw record indices, giving dict-like access to the rows of the
    underlying DataSource.
    '''

    def __init__(self, datasource, key):
        '''
        @summary: Constructor
        @param datasource: the table to index
        @type datasource: DataSource
        @param key: cell request(s) selecting the key column(s)
        '''
        DataReader.__init__(self, datasource)
        self.indexmap = {}
        self.extract(cellrequest = key)

    def buildindex(self):
        # continue numbering where the previous build stopped: iterating
        # self yields only the records not consumed so far
        i = len(self.indexmap)
        for k in self:
            self.indexmap[tuple(k)] = i
            i += 1

    def __getitem__(self, k):
        # a cleared source invalidates the whole index
        if self.sourceCleared.isSet():
            self.sourceCleared.clear()
            self.indexmap.clear()
            self.buildindex()
        try:
            iter(k)
        except TypeError:
            # scalar keys are normalized to 1-tuples to match buildindex
            k = (k,)
        # NOTE(review): dict.has_key is Python 2 only; breaks under Python 3
        if not self.indexmap.has_key(k) and self.sourceExpanded.isSet():
            self.sourceExpanded.clear()
            self.buildindex()
        return self.source._rawrecords[ self.indexmap[k] ]
diff --git a/Monitoring/src/main/python/DataProcessing/DataReader$py.class b/Monitoring/src/main/python/DataProcessing/DataReader$py.class
new file mode 100644
index 0000000..36b3714
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataReader$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/DataReader.py b/Monitoring/src/main/python/DataProcessing/DataReader.py
new file mode 100644
index 0000000..1a8c734
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataReader.py
@@ -0,0 +1,161 @@
+from __future__ import with_statement
+'''
+Created on Nov 19, 2012
+
+@author: steger
+'''
+from DataProcessing.DataHeader import DataError
+from threading import Event
+from DataProcessing.DataSource import DataSource
+
class DataReader(object):
    '''
    This class is an extension to the DataSource class.
    It provides an iterator over the rows / records of the DataSource.
    When the iterator is invoked several times only new records are yielded.
    In order to access rows, which have already been iterated, use the rewind() method to move the pointer to the first record.

    By default iteration yields all columns.
    In case user is interested in a specific slice of the table (or wants to retrieve row records on a different column order),
    they can do so by invoking the extract method, which expects an ordered list of the interesting column names.
    Besides the column names user may indicate the requested unit, in which case iteration will yield properly transformed data.

    DataReader objects register their ready Event in their Data class in order to catch signal upon new data insertion.
    @note: accessors are bound via a property() call at the bottom of the
    class body (Jython 2.2 compatible style instead of decorators)
    '''

    def __init__(self, datasource):
        '''
        Constructor
        @param datasource: the data table to iterate over
        @type datasource: DataSource
        @raise DataError: if datasource is not a DataSource
        '''
        if not isinstance(datasource, DataSource):
            raise DataError("Expect DataSource, got %s" % datasource)
        self.source = datasource
        self._seq = 0                   # index of the next record to yield
        self._extractmap = None         # column indices to yield, in order
        self._conversionmap = None      # per column (sourceunit, targetunit) or (None, None)
        self.sourceCleared = Event()    # set by the source when its records are dropped
        self.sourceExpanded = Event()   # set by the source when records are appended
        datasource.registerReader(self)
        self.extract()

    def __del__(self):
        # stop receiving clear/expand notifications
        self.source.deregisterReader(self)

    def _get_processedrecords(self):
        '''
        @summary: This property indicates how many records are processed by this reader
        @return: the index of the record iterator
        @rtype: integer
        @note: the current value may be unreliable if an iteration is currently carried out
        '''
        return self._seq
    def _set_processedrecords(self, index):
        '''
        @summary: set the iterator to a given position. A negative index means rewinding by that many rows
        @param index: position description
        @type index: integer
        '''
        index = int(index)
        if index < 0:
            self._seq = max(0, self._seq + index)
        else:
            self._seq = min(index, len(self.source))
    def _del_processedrecords(self):
        '''
        @summary: rewind to the first record row
        '''
        self._seq = 0

    def rewind(self):
        '''
        @summary: sets the next row record to the first item.
        '''
        # 'del' invokes _del_processedrecords via the property below
        del self.processedrecords
        self.sourceCleared.clear()

    def __iter__(self):
        # hold the source's read lock for the whole sweep so records cannot
        # be cleared underneath the iteration
        with self.source.readlock:
            if self.sourceCleared.isSet() and self.sourceExpanded.isSet():
                self.sourceCleared.clear()
            while self._seq < len(self.source):
                if self.sourceCleared.isSet():
                    raise DataError("Data cleared while reading records %s %s" % (self, self.source))
                self._seq += 1
                yield self._extract(self._seq - 1)
            self.sourceExpanded.clear()
            # NOTE(review): 'raise StopIteration' inside a generator is fine
            # on Python 2 / Jython but becomes a RuntimeError under
            # Python 3.7+ (PEP 479) — confirm target runtime
            raise StopIteration

    def sourcecleared(self):
        # callback invoked by the DataSource on clear
        self.sourceCleared.set()

    def sourceexpanded(self):
        # callback invoked by the DataSource on append
        self.sourceExpanded.set()

    def headercells(self):
        '''
        @summary: iterator over those columns of the Data which are relevant (i.e. which are extracted)
        @return: generator
        @rtype: DataHeaderCell
        '''
        for i in self._extractmap:
            cellname = self.source.header._cellnames[i]
            yield self.source.header._cells[cellname]

    def extract(self, cellrequest = None):
        '''
        @summary: Presets the iterator to the first row record and selects only those columns to show and convert who are referenced in the cell request.
        This method works in a best effort manner, those column names that are not in this data table are silently omitted.
        Also in case the unit requested is not allowed by a unit model that column of data is silently ignored.
        @param cellrequest: the list of the column names and the corresponding unit to show during iteration, default is None which means show all columns without unit conversion
        @type cellrequest: list of CellRequest
        '''
        self._seq = 0
        if cellrequest is None:
            # show every column, no conversion
            # NOTE(review): range() returns a list on Python 2 / Jython
            s = len(self.source.header._cellnames[:])
            self._extractmap = range(s)
            self._conversionmap = [(None, None)] * s
        else:
            self._extractmap = []
            self._conversionmap = []
            for cellreq in cellrequest:
                for (colidx, cell) in self.source.header.getCell( cellreq ):
                    try:
                        unit = cell.unit
                        dimension = cell.dimension
                        if cellreq.unit == unit:
                            # same unit: no conversion needed
                            unitmap = (None, None)
                        elif dimension.containsUnit(cellreq.unit):
                            unitmap = (unit, cellreq.unit)
                        else:
                            raise Exception("unit %s is not in the basin of dimension %s" % (unit, cell.dimension))
                    except DataError:
                        # cell has no unit/dimension set: pass data through unconverted
                        unitmap = (None, None)
                    self._extractmap.append( colidx )
                    self._conversionmap.append( unitmap )

    def _extract(self, idx):
        '''
        @summary: an internal helper method that takes care of extracting and ordering the columns in the order predefined by calling the extract method.
        @param idx: the row index
        @type idx: integer
        @return: a list of the cell data slice from the row pointed by the row index
        @rtype: list
        '''
        ret = []
        i = 0
        s = len(self._extractmap)
        D = self.source
        while i < s:
            c = self._extractmap[i]
            celldata = D[idx][c]
            sourceunit, targetunit = self._conversionmap[i]
            if sourceunit is None:
                ret.append( celldata )
            else:
                ret.append( self.source.um.convert(celldata, sourceunit, targetunit) )
            i += 1
        return ret

    processedrecords = property(_get_processedrecords,_set_processedrecords,_del_processedrecords)
diff --git a/Monitoring/src/main/python/DataProcessing/DataReader.py.old b/Monitoring/src/main/python/DataProcessing/DataReader.py.old
new file mode 100644
index 0000000..24dfd24
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataReader.py.old
@@ -0,0 +1,161 @@
+'''
+Created on Nov 19, 2012
+
+@author: steger
+'''
+from DataProcessing.DataHeader import DataError
+from threading import Event
+from DataProcessing.DataSource import DataSource
+
class DataReader(object):
    '''
    This class is an extension to the DataSource class.
    It provides an iterator over the rows / records of the DataSource.
    When the iterator is invoked several times only new records are yielded.
    In order to access rows, which have already been iterated, use the rewind() method to move the pointer to the first record.

    By default iteration yields all columns.
    In case user is interested in a specific slice of the table (or wants to retrieve row records on a different column order),
    they can do so by invoking the extract method, which expects an ordered list of the interesting column names.
    Besides the column names user may indicate the requested unit, in which case iteration will yield properly transformed data.

    DataReader objects register their ready Event in their Data class in order to catch signal upon new data insertion.
    '''

    def __init__(self, datasource):
        '''
        Constructor
        @param datasource: the data table to iterate over
        @type datasource: DataSource
        @raise DataError: if datasource is not a DataSource
        '''
        if not isinstance(datasource, DataSource):
            raise DataError("Expect DataSource, got %s" % datasource)
        self.source = datasource
        self._seq = 0                   # index of the next record to yield
        self._extractmap = None         # column indices to yield, in order
        self._conversionmap = None      # per column (sourceunit, targetunit) or (None, None)
        self.sourceCleared = Event()    # set by the source when its records are dropped
        self.sourceExpanded = Event()   # set by the source when records are appended
        datasource.registerReader(self)
        self.extract()

    def __del__(self):
        # stop receiving clear/expand notifications
        self.source.deregisterReader(self)

    @property
    def processedrecords(self):
        '''
        @summary: This property indicates how many records are processed by this reader
        @return: the index of the record iterator
        @rtype: integer
        @note: the current value may be unreliable if an iteration is currently carried out
        '''
        return self._seq
    @processedrecords.setter
    def processedrecords(self, index):
        '''
        @summary: set the iterator to a given position. A negative index means rewinding by that many rows
        @param index: position description
        @type index: integer
        '''
        index = int(index)
        if index < 0:
            self._seq = max(0, self._seq + index)
        else:
            self._seq = min(index, len(self.source))
    @processedrecords.deleter
    def processedrecords(self):
        '''
        @summary: rewind to the first record row
        '''
        self._seq = 0

    def rewind(self):
        '''
        @summary: sets the next row record to the first item.
        '''
        del self.processedrecords
        self.sourceCleared.clear()

    def __iter__(self):
        # hold the source's read lock for the whole sweep so records cannot
        # be cleared underneath the iteration
        with self.source.readlock:
            if self.sourceCleared.isSet() and self.sourceExpanded.isSet():
                self.sourceCleared.clear()
            while self._seq < len(self.source):
                if self.sourceCleared.isSet():
                    raise DataError("Data cleared while reading records %s %s" % (self, self.source))
                self._seq += 1
                yield self._extract(self._seq - 1)
            self.sourceExpanded.clear()
            # NOTE(review): 'raise StopIteration' inside a generator is fine
            # on Python 2 but becomes a RuntimeError under Python 3.7+
            # (PEP 479) — confirm target runtime
            raise StopIteration

    def sourcecleared(self):
        # callback invoked by the DataSource on clear
        self.sourceCleared.set()

    def sourceexpanded(self):
        # callback invoked by the DataSource on append
        self.sourceExpanded.set()

    def headercells(self):
        '''
        @summary: iterator over those columns of the Data which are relevant (i.e. which are extracted)
        @return: generator
        @rtype: DataHeaderCell
        '''
        for i in self._extractmap:
            cellname = self.source.header._cellnames[i]
            yield self.source.header._cells[cellname]

    def extract(self, cellrequest = None):
        '''
        @summary: Presets the iterator to the first row record and selects only those columns to show and convert who are referenced in the cell request.
        This method works in a best effort manner, those column names that are not in this data table are silently omitted.
        Also in case the unit requested is not allowed by a unit model that column of data is silently ignored.
        @param cellrequest: the list of the column names and the corresponding unit to show during iteration, default is None which means show all columns without unit conversion
        @type cellrequest: list of CellRequest
        '''
        self._seq = 0
        if cellrequest is None:
            # show every column, no conversion
            s = len(self.source.header._cellnames[:])
            self._extractmap = range(s)
            self._conversionmap = [(None, None)] * s
        else:
            self._extractmap = []
            self._conversionmap = []
            for cellreq in cellrequest:
                for (colidx, cell) in self.source.header.getCell( cellreq ):
                    try:
                        unit = cell.unit
                        dimension = cell.dimension
                        if cellreq.unit == unit:
                            # same unit: no conversion needed
                            unitmap = (None, None)
                        elif dimension.containsUnit(cellreq.unit):
                            unitmap = (unit, cellreq.unit)
                        else:
                            raise Exception("unit %s is not in the basin of dimension %s" % (unit, cell.dimension))
                    except DataError:
                        # cell has no unit/dimension set: pass data through unconverted
                        unitmap = (None, None)
                    self._extractmap.append( colidx )
                    self._conversionmap.append( unitmap )

    def _extract(self, idx):
        '''
        @summary: an internal helper method that takes care of extracting and ordering the columns in the order predefined by calling the extract method.
        @param idx: the row index
        @type idx: integer
        @return: a list of the cell data slice from the row pointed by the row index
        @rtype: list
        '''
        ret = []
        i = 0
        s = len(self._extractmap)
        D = self.source
        while i < s:
            c = self._extractmap[i]
            celldata = D[idx][c]
            sourceunit, targetunit = self._conversionmap[i]
            if sourceunit is None:
                ret.append( celldata )
            else:
                ret.append( self.source.um.convert(celldata, sourceunit, targetunit) )
            i += 1
        return ret
diff --git a/Monitoring/src/main/python/DataProcessing/DataSource$py.class b/Monitoring/src/main/python/DataProcessing/DataSource$py.class
new file mode 100644
index 0000000..095a4db
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataSource$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/DataSource.py b/Monitoring/src/main/python/DataProcessing/DataSource.py
new file mode 100644
index 0000000..0c66226
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataSource.py
@@ -0,0 +1,96 @@
+'''
+Created on Dec 10, 2012
+
+@author: steger
+'''
+from DataProcessing.DataHeader import DataError
+from DataProcessing.DataError import SamplerError
+
class DataSource(object):
    '''
    @summary: Base of chainable data processing elements.
    Subclasses must implement the name property and the __len__, __getitem__
    and _process methods.  Registered DataReader instances are notified
    whenever the underlying data are cleared or expanded.
    @note: accessors are bound via property() calls at the bottom of the
    class body (Jython 2.2 compatible style instead of decorators)
    '''
    def __init__(self, datasource):
        # consumers interested in clear/expand events
        self._readers = set()
        self._source = datasource
        self._data = None
        # unit manager, shared along the whole source chain
        self.um = datasource.um

    def _get_source(self):
        return self._source

    def _get_data(self):
        # processing is triggered lazily on each access
        self.process()
        return self._data

    def _get_name(self):
        raise DataError("%s must implement name property" % self)

    def _get_readlock(self):
        # locks are delegated to the root of the source chain
        return self.source.readlock

    def _get_writelock(self):
        return self.source.writelock

    def __len__(self):
        raise DataError("%s must implement __len__ method" % self)

    def __getitem__(self, k):
        raise DataError("%s must implement __getitem__ method" % self)

    def process(self):
        '''
        @summary: recursively process data records of the source chain
        '''
        # a root source is its own source; stop the recursion there
        if self != self.source:
            self.source.process()
        self._process()

    def _process(self):
        raise DataError("%s must implement _process method" % self)

    def _onclear(self):
        # broadcast to readers: stored records were dropped
        for r in self._readers:
            r.sourcecleared()

    def _onexpand(self):
        # broadcast to readers: new records were appended
        for r in self._readers:
            r.sourceexpanded()

    def registerReader(self, reader):
        '''
        @summary: registers a reader to catch clear and update events
        @param reader: data consumer
        @type reader: DataReader
        @raise DataError: wrong argument
        '''
        try:
            self._readers.add(reader)
            # bring the new reader up to date with the current data state
            try:
                if len(self):
                    self._onexpand()
                else:
                    self._onclear()
            except SamplerError:
                # sources that cannot tell their size yet are tolerated
                pass
        except AttributeError:
            # reader lacks the sourcecleared/sourceexpanded callbacks: undo
            self._readers.remove(reader)
            raise DataError("Expecting a DataReader, got %s" % reader)

    def deregisterReader(self, reader):
        '''
        @summary: removes a registered reader
        @param reader: data consumer
        @type reader: DataReader
        '''
        try:
            self._readers.remove(reader)
        except KeyError:
            # deregistering an unknown reader is a silent no-op
            pass

    readlock = property(_get_readlock,None,None)

    source = property(_get_source,None,None)

    data = property(_get_data,None,None)

    name = property(_get_name,None,None)

    writelock = property(_get_writelock,None,None)
diff --git a/Monitoring/src/main/python/DataProcessing/DataSource.py.old b/Monitoring/src/main/python/DataProcessing/DataSource.py.old
new file mode 100644
index 0000000..ae3aba0
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/DataSource.py.old
@@ -0,0 +1,91 @@
+'''
+Created on Dec 10, 2012
+
+@author: steger
+'''
+from DataProcessing.DataHeader import DataError
+from DataProcessing.DataError import SamplerError
+
+class DataSource(object):
+    '''
+    @summary: abstract base of the data-processing chain; wraps another
+    data source and notifies registered readers on clear/expand events.
+    @note(review): this .py.old file appears to be the pre-Jython-conversion
+    backup of DataSource.py (it still uses @property decorators) -- confirm
+    before editing it independently of the live module.
+    '''
+    def __init__(self, datasource):
+        self._readers = set()
+        self._source = datasource
+        self._data = None
+        # unit manager is taken over from the wrapped source
+        self.um = datasource.um
+
+    @property
+    def source(self):
+        return self._source
+
+    @property
+    def data(self):
+        self.process()
+        return self._data
+
+    @property
+    def name(self):
+        raise DataError("%s must implement name property" % self)
+
+    @property
+    def readlock(self):
+        return self.source.readlock
+
+    @property
+    def writelock(self):
+        return self.source.writelock
+
+    def __len__(self):
+        raise DataError("%s must implement __len__ method" % self)
+
+    def __getitem__(self, k):
+        raise DataError("%s must implement __getitem__ method" % self)
+
+    def process(self):
+        '''
+        @summary: recursively process data records of the source chain
+        '''
+        # walk up the chain first; a self-rooted source stops the recursion
+        if self != self.source:
+            self.source.process()
+        self._process()
+
+    def _process(self):
+        raise DataError("%s must implement _process method" % self)
+
+    def _onclear(self):
+        for r in self._readers:
+            r.sourcecleared()
+
+    def _onexpand(self):
+        for r in self._readers:
+            r.sourceexpanded()
+
+    def registerReader(self, reader):
+        '''
+        @summary: registers a reader to catch clear and update events
+        @param reader: data consumer
+        @type reader: DataReader
+        @raise DataError: wrong argument
+        '''
+        try:
+            self._readers.add(reader)
+            try:
+                # bring the new reader up to date with the current state
+                if len(self):
+                    self._onexpand()
+                else:
+                    self._onclear()
+            except SamplerError:
+                pass
+        except AttributeError:
+            # duck-typing check failed: undo registration and complain
+            self._readers.remove(reader)
+            raise DataError("Expecting a DataReader, got %s" % reader)
+
+    def deregisterReader(self, reader):
+        '''
+        @summary: removes a registered reader
+        @param reader: data consumer
+        @type reader: DataReader
+        '''
+        try:
+            self._readers.remove(reader)
+        except KeyError:
+            pass
diff --git a/Monitoring/src/main/python/DataProcessing/Dimension$py.class b/Monitoring/src/main/python/DataProcessing/Dimension$py.class
new file mode 100644
index 0000000..40c85c6
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Dimension$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/Dimension.py b/Monitoring/src/main/python/DataProcessing/Dimension.py
new file mode 100644
index 0000000..2fb4ab8
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Dimension.py
@@ -0,0 +1,306 @@
+from __future__ import with_statement
+'''
+Created on Feb 27, 2012
+
+@author: steger
+'''
+from DataProcessing.Unit import UnitManager
+from DataProcessing.MeasurementLevel import MeasurementLevel, Interval, Ratio
+from DataProcessing.DataError import DimensionError
+
+class DimensionManager(object):
+ '''
+ @summary: the dimension container
+ '''
+    class Dimension(object):
+        def __init__(self, dimensionmanager, reference, name, unit, level):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param name: a human readable name of the dimension
+            @type name: str
+            @param unit: the default unit of the dimension
+            @type unit: Unit
+            @param level: the measurement level of the dimension
+            @type level: MeasurementLevel
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of unit / Wrong type of level
+            '''
+            if not isinstance(unit, UnitManager.Unit):
+                raise DimensionError("Wrong type of unit %s" % unit)
+            try:
+                # issubclass raises TypeError when level is not a class at all
+                if not issubclass(level, MeasurementLevel):
+                    raise DimensionError("Wrong type of level %s" % level)
+            except TypeError:
+                raise DimensionError("Wrong type of level %s" % level)
+            self._data = (dimensionmanager, reference, name, unit)
+            self._level = level
+        # getters backing the read-only properties defined at the end of the class
+        def _get_manager(self):
+            return self._data[0]
+        def _get_unitmanager(self):
+            return self._data[0].unitmanager
+        def _get_reference(self):
+            return self._data[1]
+        def _get_name(self):
+            return self._data[2]
+        def _get_unit(self):
+            return self._data[3]
+        def _get_basin(self):
+            return self.unitmanager.getBasinByUnit(self.unit)
+        def level(self, level):
+            '''
+            @summary: check measurement level against the given level
+            @param level: measurement level
+            @type level: MeasurementLevel
+            @return: True if the measurement level given as a parameter
+            is the same or looser than the level of the dimension
+            @rtype: bool
+            @raise DimensionError: Wrong type of level
+            '''
+            if not issubclass(level, MeasurementLevel):
+                raise DimensionError("Wrong type of level %s" % level)
+            return issubclass(self._level, level)
+        def __str__(self):
+            return "%s [%s]" % (self.name, self.unit)
+        def __eq__(self, d):
+            # two dimensions are equal if they share the measurement level
+            # and their units belong to the same basin
+            if not isinstance(d, DimensionManager.Dimension):
+                raise DimensionError("wrong type")
+            return self._level == d._level and self.containsUnit(d.unit)
+        def containsUnit(self, unit):
+            '''
+            @summary: checks if a given unit is in the basin of this dimension
+            @param unit: the unit to check
+            @type unit: UnitModel.Unit
+            @return: true if the unit is applicable for this dimension
+            @rtype: bool
+            '''
+            return unit in self.unitmanager.getBasinByUnit(self.unit)
+
+
+        # Jython-friendly read-only properties (explicit property() assignments)
+        name = property(_get_name,None,None)
+
+        reference = property(_get_reference,None,None)
+
+        manager = property(_get_manager,None,None)
+
+        unitmanager = property(_get_unitmanager,None,None)
+
+        basin = property(_get_basin,None,None)
+
+        unit = property(_get_unit,None,None)
+    class DerivedDimension(Dimension):
+        def ancestors(self):
+            '''
+            @summary: iterate over all ancestors this dimension is derived from
+            @return: generator over ancestors
+            @rtype: Dimension
+            '''
+            # self._ancestor is populated by the concrete subclasses' constructors
+            for d in self._ancestor:
+                yield d
+
+    class BaseDimension(Dimension): pass  # marker type: a dimension not derived from any other
+
+ class DifferenceDimension(DerivedDimension):
+ def __init__(self, dimensionmanager, reference, name, unit, derivedfrom):
+ '''
+ @summary: constructor
+ @param dimensionmanager: reference to the dimension manager
+ @type dimensionmanager: DimensionManager
+ @param reference: the reference to the dimension
+ @type reference: str
+ @param symbol: a human readable name of the dimension
+ @type symbol: str
+ @param unit: the default unit of the dimension
+ @type base: Unit
+ @param derivedfrom: the ancestor dimension this dimension is derived from
+ @type derivedfrom: Dimension
+ @note: the level is not a class instance
+ @raise DimensionError: Wrong type of derivedfrom
+ '''
+ if not isinstance(derivedfrom, DimensionManager.Dimension):
+ raise DimensionError("Wrong type of derivedfrom")
+ if not derivedfrom.level(Interval):
+ raise DimensionError("Cannot subtract %s" % derivedfrom)
+ DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+ self._ancestor = derivedfrom
+
+ class PowerDimension(DerivedDimension):
+ def __init__(self, dimensionmanager, reference, name, unit, derivedfrom, exponent):
+ '''
+ @summary: constructor
+ @param dimensionmanager: reference to the dimension manager
+ @type dimensionmanager: DimensionManager
+ @param reference: the reference to the dimension
+ @type reference: str
+ @param symbol: a human readable name of the dimension
+ @type symbol: str
+ @param unit: the default unit of the dimension
+ @type base: Unit
+ @param derivedfrom: the ancestor dimension this dimension is derived from
+ @type derivedfrom: Dimension
+ @param exponent: dimension is a derivative of the derivedfrom dimension, by raising to power exponent
+ @type exponent: int
+ @note: the level is not a class instance
+ @raise DimensionError: Wrong type of derivedfrom / Cannot power
+ '''
+ if not isinstance(derivedfrom, DimensionManager.Dimension):
+ raise DimensionError("Wrong type of derivedfrom")
+ if not derivedfrom.level(Ratio):
+ raise DimensionError("Cannot power %s" % derivedfrom)
+ DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+ self._ancestor = (derivedfrom,)
+ self._exponent = exponent
+ def _get_exponent(self): return self.__get_exponent
+
+
+ exponent = property(_get_exponent,None,None)
+    class ProductDimension(DerivedDimension):
+        def __init__(self, dimensionmanager, reference, name, unit, derivedfrom):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param name: a human readable name of the dimension
+            @type name: str
+            @param unit: the default unit of the dimension
+            @type unit: Unit
+            @param derivedfrom: the set of dimensions that compose this dimension
+            @type derivedfrom: tuple(Dimension)
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of derivedfrom / ProductDimension is derived from more than 2 Dimensions / Cannot be a factor
+            '''
+            if not isinstance(derivedfrom, tuple):
+                raise DimensionError("Wrong type of derivedfrom")
+            # NOTE(review): the check requires at least 2 factors, but the error
+            # message text says "more than 2" -- the wording is misleading
+            if len(derivedfrom) < 2:
+                raise DimensionError("ProductDimension is derived from more than 2 Dimensions, got %d instead" % len(derivedfrom))
+            for d in derivedfrom:
+                if not d.level(Ratio):
+                    raise DimensionError("%s cannot be a factor" % d)
+            DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+            # derivedfrom is already a tuple, matching what ancestors() iterates
+            self._ancestor = derivedfrom
+
+    class RatioDimension(DerivedDimension):
+        def __init__(self, dimensionmanager, reference, name, unit, derivedfrom):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param name: a human readable name of the dimension
+            @type name: str
+            @param unit: the default unit of the dimension
+            @type unit: Unit
+            @param derivedfrom: the single dimension this ratio is derived from
+            @type derivedfrom: Dimension
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of derivedfrom / Cannot be a factor
+            '''
+            if not isinstance(derivedfrom, DimensionManager.Dimension):
+                raise DimensionError("Wrong type of derivedfrom")
+            if not derivedfrom.level(Ratio):
+                raise DimensionError("%s cannot be a factor" % derivedfrom)
+            DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+            self._ancestor = (derivedfrom,)
+
+    def __init__(self, unitmanager):
+        '''
+        @summary: constructor
+        @param unitmanager: the unit manager needs to be referenced, to check the basins of a unit
+        @type unitmanager: UnitManager
+        '''
+        # registry of known dimensions, keyed by their string reference
+        self.dimensions = {}
+        self.unitmanager = unitmanager
+
+    def __len__(self):
+        '''
+        @summary: the number of dimensions known by the DimensionManager
+        @return: the number of dimensions known by the DimensionManager
+        @rtype: int
+        '''
+        return len(self.dimensions)
+
+    def __iter__(self):
+        '''
+        @summary: an iterator over known dimensions
+        @return: the next known dimension
+        @rtype: Dimension
+        '''
+        for d in self.dimensions.values():
+            yield d
+
+    def newBaseDimension(self, reference, name, unit, level):
+        '''
+        @summary: generate and register a new base dimension
+        @param reference: the reference to the dimension
+        @type reference: str
+        @param name: a human readable name of the dimension
+        @type name: str
+        @param unit: the default unit of the dimension
+        @type unit: Unit
+        @param level: the measurement level of the dimension
+        @type level: MeasurementLevel
+        @note: the level is not a class instance
+        @return: the new dimension
+        @rtype: Dimension
+        @raise DimensionError: Dimension with reference already exists / Wrong type of unit / Wrong type of level / Wrong type of dimension /
+        Expecting derivedfrom set / Wrong number of derived from Dimensions
+        '''
+        if self.dimensions.has_key(reference):
+            raise DimensionError("Dimension with reference %s already exists" % reference)
+        dimension = self.BaseDimension(self, reference, name, unit, level)
+        self.dimensions[reference] = dimension
+        return dimension
+
+    def newDerivedDimension(self, reference, name, unit, derivedfrom, dimtype, **kw):
+        '''
+        @summary: generate and register a new derived dimension
+        @param reference: the reference to the dimension
+        @type reference: str
+        @param name: a human readable name of the dimension
+        @type name: str
+        @param unit: the default unit of the dimension
+        @type unit: Unit
+        @note: the level is not a class instance
+        @param derivedfrom: the set of dimensions that compose this dimension
+        @type derivedfrom: tuple(Dimension) or Dimension
+        @param dimtype: possible dimension types are DimensionManager.DifferenceDimension,
+        DimensionManager.PowerDimension, DimensionManager.ProductDimension, DimensionManager.RatioDimension
+        @note: dimtype parameter is not an instance, but a class scheme
+        @type dimtype: Dimension
+        @return: the new dimension
+        @rtype: Dimension
+        @keyword kw: PowerDimension expects an integer valued parameter: exponent
+        @raise DimensionError: Dimension with reference already exists / Wrong type of dimension
+        '''
+        if self.dimensions.has_key(reference):
+            raise DimensionError("Dimension with reference %s already exists" % reference)
+        # dispatch on the requested subclass; only PowerDimension takes an exponent
+        if issubclass(dimtype, self.DifferenceDimension)or issubclass(dimtype, self.ProductDimension) or issubclass(dimtype, self.RatioDimension):
+            dimension = dimtype(self, reference, name, unit, derivedfrom)
+        elif issubclass(dimtype, self.PowerDimension):
+            dimension = dimtype(self, reference, name, unit, derivedfrom, kw.get('exponent'))
+        else:
+            raise DimensionError("Wrong type of dimension %s" % dimtype)
+        self.dimensions[reference] = dimension
+        return dimension
+
+ def __getitem__(self, reference):
+ '''
+ @summary: look up the prefix in the DimensionManager based on its reference
+ @param reference: the reference to the dimension
+ @type reference: str
+ @return: the dimension if found
+ @rtype: Dimension
+ @raise DimensionError: Dimension with reference not found
+ '''
+ if not self.dimensions.has_key(reference):
+ for k in self.dimensions.keys():
+ print k,",",
+ print "."
+ raise DimensionError("Dimension with reference %s not found" % reference)
+ return self.dimensions[reference]
diff --git a/Monitoring/src/main/python/DataProcessing/Dimension.py.old b/Monitoring/src/main/python/DataProcessing/Dimension.py.old
new file mode 100644
index 0000000..f5bfd52
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Dimension.py.old
@@ -0,0 +1,295 @@
+'''
+Created on Feb 27, 2012
+
+@author: steger
+'''
+from DataProcessing.Unit import UnitManager
+from DataProcessing.MeasurementLevel import MeasurementLevel, Interval, Ratio
+from DataProcessing.DataError import DimensionError
+
+class DimensionManager(object):
+    '''
+    @summary: the dimension container
+    @note(review): this .py.old file appears to be the pre-Jython-conversion
+    backup of Dimension.py (it still uses @property decorators) -- confirm
+    before editing it independently of the live module.
+    '''
+    class Dimension(object):
+        def __init__(self, dimensionmanager, reference, name, unit, level):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param symbol: a human readable name of the dimension
+            @type symbol: str
+            @param unit: the default unit of the dimension
+            @type base: Unit
+            @param level: the measurement level of the dimension
+            @type level: MeasurementLevel
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of unit / Wrong type of level
+            '''
+            if not isinstance(unit, UnitManager.Unit):
+                raise DimensionError("Wrong type of unit %s" % unit)
+            try:
+                if not issubclass(level, MeasurementLevel):
+                    raise DimensionError("Wrong type of level %s" % level)
+            except TypeError:
+                raise DimensionError("Wrong type of level %s" % level)
+            self._data = (dimensionmanager, reference, name, unit)
+            self._level = level
+        @property
+        def manager(self):
+            return self._data[0]
+        @property
+        def unitmanager(self):
+            return self._data[0].unitmanager
+        @property
+        def reference(self):
+            return self._data[1]
+        @property
+        def name(self):
+            return self._data[2]
+        @property
+        def unit(self):
+            return self._data[3]
+        @property
+        def basin(self):
+            return self.unitmanager.getBasinByUnit(self.unit)
+        def level(self, level):
+            '''
+            @summary: check measurement level against the given level
+            @param level: measurement level
+            @type level: MeasurementLevel
+            @return: True if the measurement level given as a parameter
+            is the same or looser than the level of the dimension
+            @rtype: bool
+            @raise DimensionError: Wrong type of level
+            '''
+            if not issubclass(level, MeasurementLevel):
+                raise DimensionError("Wrong type of level %s" % level)
+            return issubclass(self._level, level)
+        def __str__(self):
+            return "%s [%s]" % (self.name, self.unit)
+        def __eq__(self, d):
+            if not isinstance(d, DimensionManager.Dimension):
+                raise DimensionError("wrong type")
+            return self._level == d._level and self.containsUnit(d.unit)
+        def containsUnit(self, unit):
+            '''
+            @summary: checks if a given unit is in the basin of this dimension
+            @param unit: the unit to check
+            @type unit: UnitModel.Unit
+            @return: true if the unit is applicable for this dimension
+            @rtype: bool
+            '''
+            return unit in self.unitmanager.getBasinByUnit(self.unit)
+
+    class DerivedDimension(Dimension):
+        def ancestors(self):
+            '''
+            @summary: iterate over all ancestors this dimension is derived from
+            @return: generator over ancestors
+            @rtype: Dimension
+            '''
+            for d in self._ancestor:
+                yield d
+
+    class BaseDimension(Dimension): pass
+
+    class DifferenceDimension(DerivedDimension):
+        def __init__(self, dimensionmanager, reference, name, unit, derivedfrom):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param symbol: a human readable name of the dimension
+            @type symbol: str
+            @param unit: the default unit of the dimension
+            @type base: Unit
+            @param derivedfrom: the ancestor dimension this dimension is derived from
+            @type derivedfrom: Dimension
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of derivedfrom
+            '''
+            if not isinstance(derivedfrom, DimensionManager.Dimension):
+                raise DimensionError("Wrong type of derivedfrom")
+            if not derivedfrom.level(Interval):
+                raise DimensionError("Cannot subtract %s" % derivedfrom)
+            DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+            # NOTE(review): stores a bare Dimension while ancestors() iterates
+            # self._ancestor; sibling subclasses store a tuple -- verify
+            self._ancestor = derivedfrom
+
+    class PowerDimension(DerivedDimension):
+        def __init__(self, dimensionmanager, reference, name, unit, derivedfrom, exponent):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param symbol: a human readable name of the dimension
+            @type symbol: str
+            @param unit: the default unit of the dimension
+            @type base: Unit
+            @param derivedfrom: the ancestor dimension this dimension is derived from
+            @type derivedfrom: Dimension
+            @param exponent: dimension is a derivative of the derivedfrom dimension, by raising to power exponent
+            @type exponent: int
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of derivedfrom / Cannot power
+            '''
+            if not isinstance(derivedfrom, DimensionManager.Dimension):
+                raise DimensionError("Wrong type of derivedfrom")
+            if not derivedfrom.level(Ratio):
+                raise DimensionError("Cannot power %s" % derivedfrom)
+            DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+            self._ancestor = (derivedfrom,)
+            self._exponent = exponent
+        @property
+        def exponent(self): return self._exponent
+
+    class ProductDimension(DerivedDimension):
+        def __init__(self, dimensionmanager, reference, name, unit, derivedfrom):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param symbol: a human readable name of the dimension
+            @type symbol: str
+            @param unit: the default unit of the dimension
+            @type base: Unit
+            @param derivedfrom: the set of dimensions that compose this dimension
+            @type derivedfrom: tuple(Dimension)
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of derivedfrom / ProductDimension is derived from more than 2 Dimensions / Cannot be a factor
+            '''
+            if not isinstance(derivedfrom, tuple):
+                raise DimensionError("Wrong type of derivedfrom")
+            if len(derivedfrom) < 2:
+                raise DimensionError("ProductDimension is derived from more than 2 Dimensions, got %d instead" % len(derivedfrom))
+            for d in derivedfrom:
+                if not d.level(Ratio):
+                    raise DimensionError("%s cannot be a factor" % d)
+            DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+            self._ancestor = derivedfrom
+
+    class RatioDimension(DerivedDimension):
+        def __init__(self, dimensionmanager, reference, name, unit, derivedfrom):
+            '''
+            @summary: constructor
+            @param dimensionmanager: reference to the dimension manager
+            @type dimensionmanager: DimensionManager
+            @param reference: the reference to the dimension
+            @type reference: str
+            @param symbol: a human readable name of the dimension
+            @type symbol: str
+            @param unit: the default unit of the dimension
+            @type base: Unit
+            @param derivedfrom: the set of dimensions that compose this dimension
+            @type derivedfrom: tuple(Dimension)
+            @note: the level is not a class instance
+            @raise DimensionError: Wrong type of derivedfrom / Cannot be a factor
+            '''
+            if not isinstance(derivedfrom, DimensionManager.Dimension):
+                raise DimensionError("Wrong type of derivedfrom")
+            if not derivedfrom.level(Ratio):
+                raise DimensionError("%s cannot be a factor" % derivedfrom)
+            DimensionManager.Dimension.__init__(self, dimensionmanager, reference, name, unit, Ratio)
+            self._ancestor = (derivedfrom,)
+
+    def __init__(self, unitmanager):
+        '''
+        @summary: constructor
+        @param unitmanager: the unit manager needs to be referenced, to check the basins of a unit
+        @type unitmanager: UnitManager
+        '''
+        self.dimensions = {}
+        self.unitmanager = unitmanager
+
+    def __len__(self):
+        '''
+        @summary: the number of dimension known by the DimensionManager
+        @return: the number of dimension known by the DimensionManager
+        @rtype: int
+        '''
+        return len(self.dimensions)
+
+    def __iter__(self):
+        '''
+        @summary: an iterator over known dimensions
+        @return: the next known dimension
+        @rtype: Dimension
+        '''
+        for d in self.dimensions.values():
+            yield d
+
+    def newBaseDimension(self, reference, name, unit, level):
+        '''
+        @summary: generate a new dimension
+        @param reference: the reference to the dimension
+        @type reference: str
+        @param symbol: a human readable name of the dimension
+        @type symbol: str
+        @param unit: the default unit of the dimension
+        @type base: Unit
+        @param level: the measurement level of the dimension
+        @type level: MeasurementLevel
+        @note: the level is not a class instance
+        @return: the new dimension
+        @rtype: Dimension
+        @raise DimensionError: Dimension with reference already exists / Wrong type of unit / Wrong type of level / Wrong type of dimension /
+        Expecting derivedfrom set / Wrong number of derived from Dimensions
+        '''
+        if self.dimensions.has_key(reference):
+            raise DimensionError("Dimension with reference %s already exists" % reference)
+        dimension = self.BaseDimension(self, reference, name, unit, level)
+        self.dimensions[reference] = dimension
+        return dimension
+
+    def newDerivedDimension(self, reference, name, unit, derivedfrom, dimtype, **kw):
+        '''
+        @summary: generate a new dimension
+        @param reference: the reference to the dimension
+        @type reference: str
+        @param symbol: a human readable name of the dimension
+        @type symbol: str
+        @param unit: the default unit of the dimension
+        @type base: Unit
+        @note: the level is not a class instance
+        @param derivedfrom: the set of dimensions that compose this dimension
+        @type derivedfrom: tuple(Dimension) or Dimension
+        @param dimtype: possible dimension types are DimensionManager.DifferenceDimension,
+        DimensionManager.PowerDimension, DimensionManager.ProductDimension, DimensionManager.RatioDimension
+        @note: dimtype parameter is not an instance, but a class scheme
+        @type dimtype: Dimension
+        @return: the new dimension
+        @rtype: Dimension
+        @keyword kw: PowerDimension expects an integer valued parameter: exponent
+        @raise DimensionError: Dimension with reference already exists / Wrong type of dimension
+        '''
+        if self.dimensions.has_key(reference):
+            raise DimensionError("Dimension with reference %s already exists" % reference)
+        if issubclass(dimtype, self.DifferenceDimension)or issubclass(dimtype, self.ProductDimension) or issubclass(dimtype, self.RatioDimension):
+            dimension = dimtype(self, reference, name, unit, derivedfrom)
+        elif issubclass(dimtype, self.PowerDimension):
+            dimension = dimtype(self, reference, name, unit, derivedfrom, kw.get('exponent'))
+        else:
+            raise DimensionError("Wrong type of dimension %s" % dimtype)
+        self.dimensions[reference] = dimension
+        return dimension
+
+    def __getitem__(self, reference):
+        '''
+        @summary: look up the prefix in the DimensionManager based on its reference
+        @param reference: the reference to the dimension
+        @type reference: str
+        @return: the dimension if found
+        @rtype: Dimension
+        @raise DimensionError: Dimension with reference not found
+        '''
+        if not self.dimensions.has_key(reference):
+            raise DimensionError("Dimension with reference %s not found" % reference)
+        return self.dimensions[reference]
diff --git a/Monitoring/src/main/python/DataProcessing/MeasurementLevel$py.class b/Monitoring/src/main/python/DataProcessing/MeasurementLevel$py.class
new file mode 100644
index 0000000..321a5c1
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/MeasurementLevel$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/MeasurementLevel.py b/Monitoring/src/main/python/DataProcessing/MeasurementLevel.py
new file mode 100644
index 0000000..4e3d702
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/MeasurementLevel.py
@@ -0,0 +1,46 @@
+'''
+Created on Mar 22, 2012
+
+@author: steger, jozsef
+
+@summary: Class representation of the measurement levels (aka measurement scale) defined by Stanley Smith Stevens.
+Stevens proposed his theory in a 1946 Science article titled "On the theory of scales of measurement".
+@note: These classes are not meant to be instantiated ever.
+'''
+
+class MeasurementLevel:
+    '''
+    @summary: It serves as the common scheme for the measurement levels. Only its subclasses have a meaning.
+    @note: levels are compared with issubclass(), never instantiated
+    '''
+    pass
+
+# scale hierarchy via subclassing: Nominal < Ordinal < Interval < Ratio
+class Nominal(MeasurementLevel):
+    '''
+    @summary: Values of this kind of measurement are mere elements of a set.
+    '''
+    pass
+
+class Ordinal(Nominal):
+    '''
+    @summary: A ranking is defined between the values of this kind of measurement.
+    '''
+    pass
+
+class Interval(Ordinal):
+    '''
+    @summary: A difference is defined which can be evaluated for any two values of this kind of measurement.
+    '''
+    pass
+
+class Ratio(Interval):
+    '''
+    @summary: There is a reference value defined for this kind of measurement, that is "zero" has a meaning.
+    '''
+    pass
+
+# lookup table mapping level names (as used in the information model) to classes
+lut_level = {
+    'NominalLevel': Nominal,
+    'OrdinalLevel': Ordinal,
+    'IntervalLevel': Interval,
+    'RatioLevel': Ratio,
+}
diff --git a/Monitoring/src/main/python/DataProcessing/MeasurementLevel.py.old b/Monitoring/src/main/python/DataProcessing/MeasurementLevel.py.old
new file mode 100644
index 0000000..4e3d702
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/MeasurementLevel.py.old
@@ -0,0 +1,46 @@
+'''
+Created on Mar 22, 2012
+
+@author: steger, jozsef
+
+@summary: Class representation of the measurement levels (aka measurement scale) defined by Stanley Smith Stevens.
+Stevens proposed his theory in a 1946 Science article titled "On the theory of scales of measurement".
+@note: These classes are not meant to be instantiated ever.
+'''
+
+class MeasurementLevel:
+    '''
+    @summary: It serves as the common scheme for the measurement levels. Only its subclasses have a meaning.
+    @note: levels are compared with issubclass(), never instantiated
+    '''
+    pass
+
+# scale hierarchy via subclassing: Nominal < Ordinal < Interval < Ratio
+class Nominal(MeasurementLevel):
+    '''
+    @summary: Values of this kind of measurement are mere elements of a set.
+    '''
+    pass
+
+class Ordinal(Nominal):
+    '''
+    @summary: A ranking is defined between the values of this kind of measurement.
+    '''
+    pass
+
+class Interval(Ordinal):
+    '''
+    @summary: A difference is defined which can be evaluated for any two values of this kind of measurement.
+    '''
+    pass
+
+class Ratio(Interval):
+    '''
+    @summary: There is a reference value defined for this kind of measurement, that is "zero" has a meaning.
+    '''
+    pass
+
+# lookup table mapping level names (as used in the information model) to classes
+lut_level = {
+    'NominalLevel': Nominal,
+    'OrdinalLevel': Ordinal,
+    'IntervalLevel': Interval,
+    'RatioLevel': Ratio,
+}
diff --git a/Monitoring/src/main/python/DataProcessing/Parameter$py.class b/Monitoring/src/main/python/DataProcessing/Parameter$py.class
new file mode 100644
index 0000000..19b9068
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Parameter$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/Parameter.py b/Monitoring/src/main/python/DataProcessing/Parameter.py
new file mode 100644
index 0000000..d0f2026
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Parameter.py
@@ -0,0 +1,283 @@
+from __future__ import with_statement
+'''
+Created on Oct 20, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+from DataProcessing.Dimension import DimensionManager
+from DataProcessing.DataError import ParameterError
+
class Parameter(object):
    '''
    @author: steger, jozsef
    @summary: A single unit-aware control parameter of a monitoring task.
    The parameter is described by an immutable triplet (name, value type,
    dimension); the current value, if any, is kept together with its unit.
    '''

    def __init__(self, name, valuetype, unitmanager, dimension, default = None):
        '''
        @summary: Constructor
        @param name: the name of the parameter
        @type name: str
        @param valuetype: the type used to cast values read from this parameter
        @type valuetype: type
        @param unitmanager: the manager used to convert between units of the dimension
        @param dimension: the dimension the parameter values belong to
        @type dimension: DimensionManager.Dimension
        @param default: the preset unit aware value of the parameter
        @type default: a tuple of value and unit
        @raise ParameterError: wrong type of dimension
        '''
        self.um = unitmanager
        if not isinstance(dimension, DimensionManager.Dimension):
            raise ParameterError("wrong type of dimension")
        self._data = (name, valuetype, dimension)
        self._value = None
        if default is not None:
            self.value = default

    def __str__(self):
        if self._value is None:
            return "%s (%s)" % (self.name, self.dimension)
        return "%s (%s) = %s [%s] as %s" % (self.name, self.dimension.name, self._value[0], self._value[1], self.valuetype)

    # read-only accessors for the descriptive triplet (Jython-safe property() form)
    def _get_name(self):
        return self._data[0]
    name = property(_get_name,None,None)

    def _get_valuetype(self):
        return self._data[1]
    valuetype = property(_get_valuetype,None,None)

    def _get_dimension(self):
        return self._data[2]
    dimension = property(_get_dimension,None,None)

    # the unit-aware value: a (value, unit) tuple, or None when unset
    def _get_value(self):
        return self._value
    def _set_value(self, value):
        _, unit = value
        if not self.dimension.containsUnit(unit):
            raise ParameterError("Unit %s is not in the basin of the dimension %s" % (unit, self.dimension))
        self._value = tuple(value)
    def _del_value(self):
        self._value = None
    value = property(_get_value,_set_value,_del_value)

    def copy(self):
        '''
        @summary: create an independent clone sharing only the unit manager
        @return: a new Parameter with the same description and current value
        @rtype: Parameter
        '''
        return Parameter(name = self.name, valuetype = self.valuetype, unitmanager = self.um, dimension = self.dimension, default = self.value)

    def convert(self, unit):
        '''
        @summary: returns the value of the given parameter in the required unit
        @param unit: the requested unit, which must adhere to the unit model of this parameter
        @type unit: Unit
        @return: the parameter value represented in the requested unit, cast to valuetype
        @raise ParameterError: Unit not in dimension basin / Unit is not initialized
        '''
        if not self.dimension.containsUnit(unit):
            raise ParameterError("Unit %s is not in the basin of the dimension %s" % (unit, self.dimension))
        if self._value is None:
            raise ParameterError("%s is not initialized" % self)
        val, un = self._value
        if unit == un:
            return self.valuetype(val)
        return self.valuetype( self.um.convert(value = val, from_unit = un, to_unit = unit) )

    def convertToReferencedUnit(self, unitreference):
        '''
        @summary: returns the parameter value in units, where the unit is referenced
        @param unitreference: the reference to the requested unit, which must adhere to the unit model of this parameter
        @type unitreference: str
        '''
        return self.convert( self.um[unitreference] )
class ParameterList(object):
    '''
    @author: steger, jozsef
    @summary:
    This class represents a list of control parameters of a monitoring task.
    Parameters are stored in a dictionary keyed by their name, so a name
    occurs at most once; re-appending an existing name updates its value only.
    '''

    def __init__(self, parameterlist = ()):
        '''
        @summary: Constructor
        @param parameterlist: a list of parameters to handle together
        @type parameterlist: list(Parameter) or ParameterList
        @note: the default is an immutable empty tuple rather than a mutable
        list literal, so no single default instance is shared between calls
        '''
        self.parameter = {}
        self.extend(parameterlist)

    def __str__(self):
        '''
        @summary: a human readable dump of the held parameters
        @rtype: str
        '''
        return "<ParameterList> [%s\n\t]" % "\n\t\t".join([ "%s," % (p) for p in self.parameter.values() ])

    def __len__(self):
        '''
        @summary: return the size of the parameter list
        @return: the size of the parameter list
        @rtype: integer
        '''
        return len(self.parameter)

    def __iter__(self):
        '''
        @summary: provide an iterator over all the parameter elements
        @return: the next parameter
        @rtype: Parameter
        '''
        for p in self.parameter.values():
            yield p

    def append(self, p):
        '''
        @summary: append a new Parameter to the parameter list. If a wrong type of parameter is given, silently discard it.
        In case a parameter with the same name exists overwrite its value only.
        @param p: a new parameter to add or an existing parameter to update former values
        @type p: Parameter
        '''
        if not isinstance(p, Parameter):
            # best effort: warn and skip instead of raising
            print("WW: %s is not a parameter" % str(p))
            return
        if self.has_key(p.name):
            print("WW: parameter with name %s is updated" % p.name)
            self.parameter[p.name].value = p.value
        else:
            self.parameter[p.name] = p

    def has_key(self, name):
        '''
        @summary: Check if a parameter with a given name is already in the list
        @param name: the name of the parameter looking for
        @type name: str
        @rtype: bool
        '''
        return name in self.parameter

    def get(self, name, unit):
        '''
        @summary: Read the parameter pointed by a given name in the required unit
        @param name: the name of the parameter
        @type name: str
        @param unit: the target unit the caller wants the named parameter to be expressed in
        @type unit: Unit
        @raise ParameterError: no such parameter name
        '''
        if not self.has_key(name):
            raise ParameterError("No Parameter with name: %s" % name)
        return self.parameter[name].convert(unit)

    def getInReferencedUnits(self, name, unitreference):
        '''
        @summary: Read the parameter pointed by a given name in the required unit
        @param name: the name of the parameter
        @type name: str
        @param unitreference: the target unit the caller wants the named parameter to be expressed in
        @type unitreference: str
        @raise ParameterError: no such parameter name
        '''
        if not self.has_key(name):
            raise ParameterError("No Parameter with name: %s" % name)
        return self.parameter[name].convertToReferencedUnit(unitreference)

    def update(self, name, value, unit):
        '''
        @summary: reset the value of the parameter with the given name
        @param name: the name of the parameter to update
        @type name: str
        @param value: the new value
        @type value: depends on the Parameter.type
        @param unit: the new unit
        @type unit: Unit
        '''
        self.parameter[name].value = value, unit

    def updateInReferencedUnits(self, name, value, unitreference):
        '''
        @summary: reset the value of the parameter with the given name
        @param name: the name of the parameter to update
        @type name: str
        @param value: the new value
        @type value: depends on the Parameter.type
        @param unitreference: the new unit
        @type unitreference: str
        '''
        p = self.parameter[name]
        p.value = value, p.um[unitreference]

    def update_by_list(self, p_updating):
        '''
        @summary: update parameter list with matching elements of another parameter list
        @param p_updating: parameter list, whose matching elements update the element of this list
        @type p_updating: ParameterList
        @raise ParameterError: wrong argument type
        '''
        if not isinstance(p_updating, ParameterList):
            raise ParameterError("wrong argument type")
        for name in p_updating.parameter_names():
            if self.has_key(name):
                v = p_updating.parameter[name].value
                # only overwrite with initialized values
                if v is not None:
                    self.parameter[name].value = v

    def clear(self):
        '''
        @summary: Empty the parameter list
        '''
        self.parameter.clear()

    def copy(self):
        '''
        @summary: deep-ish copy: clone every held parameter into a new list
        @rtype: ParameterList
        '''
        return ParameterList( map(lambda p: p.copy(), self) )

    def extend(self, parameterlist):
        '''
        @summary: extends this parameter list with the items of another parameter list
        @param parameterlist: the list of parameter items to extend with
        @type parameterlist: ParameterList
        '''
        for p in parameterlist:
            self.append(p)

    def parameter_names(self):
        '''
        @summary: List the names of the currently hold parameters
        @return: list of Parameter.name
        @rtype: list
        '''
        return self.parameter.keys()

    def formkeyvaldict(self):
        '''
        @summary: flatten the list into a {name: raw value} dictionary (units dropped)
        @rtype: dict
        '''
        return dict( [ (name, p.value[0]) for (name, p) in self.parameter.items() ] )
diff --git a/Monitoring/src/main/python/DataProcessing/Parameter.py.old b/Monitoring/src/main/python/DataProcessing/Parameter.py.old
new file mode 100644
index 0000000..e5bce4f
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Parameter.py.old
@@ -0,0 +1,280 @@
+'''
+Created on Oct 20, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+from DataProcessing.Dimension import DimensionManager
+from DataProcessing.DataError import ParameterError
+
class Parameter(object):
    '''
    @author: steger, jozsef
    @summary:
    This class represents the control parameters of a monitoring task.
    @note: .py.old snapshot kept for reference next to the Jython-converted
    live copy, which expresses the same properties via property() calls.
    '''

    def __init__(self, name, valuetype, unitmanager, dimension, default = None):
        '''
        @summary: Constructor
        @param name: the name of the parameter
        @type name: str
        @param valuetype: the type of the parameter (used when reading value information)
        @type valuetype: type
        @param unitmanager: the manager used to convert between units of the dimension
        @param dimension: the dimension the parameter values belong to
        @type dimension: DimensionManager.Dimension
        @param default: the preset unit aware value of the parameter
        @type default: a tuple of value and unit
        @raise ParameterError: wrong type of dimension
        '''
        self.um = unitmanager
        if not isinstance(dimension, DimensionManager.Dimension):
            raise ParameterError("wrong type of dimension")
        self._data = (name, valuetype, dimension)
        self._value = None
        if default is not None:
            self.value = default

    def __str__(self):
        if self._value is None:
            return "%s (%s)" % (self.name, self.dimension)
        else:
            return "%s (%s) = %s [%s] as %s" % (self.name, self.dimension.name, self._value[0], self._value[1], self.valuetype)

    @property
    def name(self):
        # first member of the immutable (name, valuetype, dimension) triplet
        return self._data[0]

    @property
    def valuetype(self):
        return self._data[1]

    @property
    def dimension(self):
        return self._data[2]

    @property
    def value(self):
        # the current (value, unit) tuple, or None when unset
        return self._value
    @value.setter
    def value(self, value):
        _, unit = value
        if not self.dimension.containsUnit(unit):
            raise ParameterError("Unit %s is not in the basin of the dimension %s" % (unit, self.dimension))
        self._value = tuple(value)
    @value.deleter
    def value(self):
        self._value = None

    def copy(self):
        # independent clone sharing only the unit manager
        return Parameter(name = self.name, valuetype = self.valuetype, unitmanager = self.um, dimension = self.dimension, default = self.value)

    def convert(self, unit):
        '''
        @summary: returns the value of the given parameter in the required unit
        @param unit: the requested unit, which must adhere to the unit model of this parameter
        @type unit: Unit
        @return: the parameter value represented in the requested units, cast to valuetype
        @raise ParameterError: Unit not in dimension basin / Unit is not initialized
        '''
        if not self.dimension.containsUnit(unit):
            raise ParameterError("Unit %s is not in the basin of the dimension %s" % (unit, self.dimension))
        if self._value is None:
            raise ParameterError("%s is not initialized" % self)
        val, un = self._value
        if unit == un:
            return self.valuetype(val)
        else:
            return self.valuetype( self.um.convert(value = val, from_unit = un, to_unit = unit) )

    def convertToReferencedUnit(self, unitreference):
        '''
        @summary: returns the parameter value in units, where the unit is referenced
        @param unitreference: the reference to the requested unit, which must adhere to the unit model of this parameter
        @type unitreference: str
        '''
        return self.convert( self.um[unitreference] )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
class ParameterList(object):
    '''
    @author: steger, jozsef
    @summary:
    This class represents a list of control parameters of a monitoring task.
    @note: .py.old snapshot kept for reference next to the live copy.
    '''

    # NOTE(review): mutable default argument below; harmless here because it
    # is only iterated, never mutated, but a tuple default would be safer.
    def __init__(self, parameterlist = []):
        '''
        @summary: Constructor
        @param parameterlist: a list of parameters to handle together
        @type parameterlist: list(Parameter) or ParameterList
        '''
        self.parameter = {}
        self.extend(parameterlist)

    def __str__(self):
        '''
        @summary: a human readable dump of the held parameters
        @rtype: str
        '''
        return "<ParameterList> [%s\n\t]" % "\n\t\t".join([ "%s," % (p) for p in self.parameter.values() ])

    def __len__(self):
        '''
        @summary: return the size of the parameter list
        @return: the size of the parameter list
        @rtype: integer
        '''
        return len(self.parameter)

    def __iter__(self):
        '''
        @summary: provide an iterator over all the parameter elements
        @return: the next parameter
        @rtype: Parameter
        '''
        for p in self.parameter.values():
            yield p

    def append(self, p):
        '''
        @summary: append a new Parameter to the parameter list. If a wrong type of parameter is given, silently discard it.
        In case a parameter with the same name exists overwrite its value only.
        @param p: a new parameter to add or an existing parameter to update former values
        @type p: Parameter
        '''
        if not isinstance(p, Parameter):
            # best effort: warn and skip instead of raising
            print "WW: %s is not a parameter" % str(p)
            return
        if self.has_key(p.name):
            print "WW: parameter with name %s is updated" % p.name
            self.parameter[p.name].value = p.value
        else:
            self.parameter[p.name] = p

    def has_key(self, name):
        '''
        @summary: Check if a parameter with a given name is already in the list
        @param name: the name of the parameter looking for
        @type name: str
        '''
        return self.parameter.has_key(name)

    def get(self, name, unit):
        '''
        @summary: Read the parameter pointed by a given name in the required unit
        @param name: the name of the parameter
        @type name: str
        @param unit: the target unit the caller wants the named parameter to be expressed in
        @type unit: Unit
        @raise ParameterError: no such parameter name
        '''
        if not self.has_key(name):
            raise ParameterError("No Parameter with name: %s" % name)
        return self.parameter[name].convert(unit)

    def getInReferencedUnits(self, name, unitreference):
        '''
        @summary: Read the parameter pointed by a given name in the required unit
        @param name: the name of the parameter
        @type name: str
        @param unitreference: the target unit the caller wants the named parameter to be expressed in
        @type unitreference: str
        @raise ParameterError: no such parameter name
        '''
        if not self.has_key(name):
            raise ParameterError("No Parameter with name: %s" % name)
        return self.parameter[name].convertToReferencedUnit(unitreference)

    def update(self, name, value, unit):
        '''
        @summary: reset the value of the parameter with the given name
        @param name: the name of the parameter to update
        @type name: str
        @param value: the new value
        @type value: depends on the Parameter.type
        @param unit: the new unit
        @type unit: Unit
        '''
        self.parameter[name].value = value, unit

    def updateInReferencedUnits(self, name, value, unitreference):
        '''
        @summary: reset the value of the parameter with the given name
        @param name: the name of the parameter to update
        @type name: str
        @param value: the new value
        @type value: depends on the Parameter.type
        @param unitreference: the new unit
        @type unitreference: str
        '''
        p = self.parameter[name]
        p.value = value, p.um[unitreference]

    def update_by_list(self, p_updating):
        '''
        @summary: update parameter list with matching elements of another parameter list
        @param p_updating: parameter list, whose matching elements update the element of this list
        @type p_updating: ParameterList
        @raise ParameterError: wrong argument type
        '''
        if not isinstance(p_updating, ParameterList):
            raise ParameterError("wrong argument type")
        for name in p_updating.parameter_names():
            if self.has_key(name):
                v = p_updating.parameter[name].value
                # only overwrite with initialized values
                if v is not None:
                    self.parameter[name].value = v

    def clear(self):
        '''
        @summary: Empty the parameter list
        '''
        self.parameter.clear()

    def copy(self):
        # clone every held parameter into a new, independent list
        return ParameterList( map(lambda p: p.copy(), self) )

    def extend(self, parameterlist):
        '''
        @summary: extends this parameter list with the items of another parameter list
        @param parameterlist: the list of parameter items to extend with
        @type parameterlist: ParameterList
        '''
        for p in parameterlist:
            self.append(p)

    def parameter_names(self):
        '''
        @summary: List the names of the currently hold parameters
        @return: list of Parameter.name
        @rtype: list
        '''
        return self.parameter.keys()

    def formkeyvaldict(self):
        # flatten into a {name: raw value} dictionary (units dropped)
        return dict( [ (name, p.value[0]) for (name, p) in self.parameter.iteritems() ] )
diff --git a/Monitoring/src/main/python/DataProcessing/Prefix$py.class b/Monitoring/src/main/python/DataProcessing/Prefix$py.class
new file mode 100644
index 0000000..8d2d956
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Prefix$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/Prefix.py b/Monitoring/src/main/python/DataProcessing/Prefix.py
new file mode 100644
index 0000000..9778f87
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Prefix.py
@@ -0,0 +1,119 @@
+from __future__ import with_statement
+'''
+Created on Feb 27, 2012
+
+@author: steger
+'''
+from DataProcessing.DataError import PrefixError
+
class PrefixManager(object):
    '''
    @summary: the unit prefix container
    '''
    class Prefix(object):
        def __init__(self, reference, symbol, base, exponent):
            '''
            @summary: constructor
            @param reference: the reference to the unit prefix
            @type reference: str
            @param symbol: a short form of the unit prefix
            @type symbol: str
            @param base: the base of the unit prefix, typically 2 or 10
            @type base: int
            @param exponent: the exponent of the unit prefix
            @type exponent: int
            '''
            # precompute the multiplier this prefix represents
            scale = base ** exponent
            self._data = (reference, symbol, base, exponent, scale)
        def __str__(self):
            return self.symbol
        # read-only accessors over the immutable _data tuple (Jython-safe form)
        def _get_reference(self):
            return self._data[0]
        def _get_symbol(self):
            return self._data[1]
        def _get_base(self):
            return self._data[2]
        def _get_exponent(self):
            return self._data[3]
        def _get_scale(self):
            return self._data[4]


        symbol = property(_get_symbol,None,None)

        base = property(_get_base,None,None)

        exponent = property(_get_exponent,None,None)

        reference = property(_get_reference,None,None)

        scale = property(_get_scale,None,None)
    def __init__(self):
        '''
        @summary: constructor
        '''
        # prefixes: reference -> Prefix; duplicatesymbols: symbols registered
        # under more than one reference
        self.prefixes = {}
        self.duplicatesymbols = set()

    def __contains__(self, item):
        '''
        @summary: check the existence of a unit prefix
        @param item: a unit prefix or its symbol
        @type item: Prefix or str
        @return: True if the prefix is known by the PrefixManager
        @rtype: bool
        @raise PrefixError: Wrong item type
        '''
        if isinstance(item, self.Prefix):
            return item in self.prefixes.values()
        elif isinstance(item, str):
            # symbols are not unique, so scan for the first match
            for prefix in self.prefixes.values():
                if prefix.symbol == item:
                    return True
            return False
        else:
            raise PrefixError("Wrong item type %s" % item)

    def __len__(self):
        '''
        @summary: the number of prefixes known by the PrefixManager
        @return: the number of prefixes known by the PrefixManager
        @rtype: int
        '''
        return len(self.prefixes)

    def newPrefix(self, reference, symbol, base, exponent):
        '''
        @summary: generate a new unit prefix
        @param reference: the reference to the unit prefix
        @type reference: str
        @param symbol: a short form of the unit prefix
        @type symbol: str
        @param base: the base of the unit prefix, typically 2 or 10
        @type base: int
        @param exponent: the exponent of the unit prefix
        @type exponent: int
        @return: the new unit prefix
        @rtype: Prefix
        @raise PrefixError: Prefix with reference exists
        '''
        if self.prefixes.has_key(reference):
            raise PrefixError("Prefix with reference %s already exists" % reference)
        # explicit unbound call — presumably to bypass subclass overrides of
        # __contains__; confirm before changing to "symbol in self"
        if PrefixManager.__contains__(self, symbol):
            self.duplicatesymbols.add(symbol)
        prefix = self.Prefix(reference, symbol, base, exponent)
        self.prefixes[reference] = prefix
        return prefix

    def __getitem__(self, reference):
        '''
        @summary: look up the prefix in the PrefixManager based on its reference
        @param reference: the reference to the unit prefix
        @type reference: str
        @return: the unit prefix found
        @rtype: Prefix
        @raise PrefixError: Prefix with reference not found
        '''
        if self.prefixes.has_key(reference):
            return self.prefixes[reference]
        raise PrefixError("Prefix with reference %s not found" % reference)
diff --git a/Monitoring/src/main/python/DataProcessing/Prefix.py.old b/Monitoring/src/main/python/DataProcessing/Prefix.py.old
new file mode 100644
index 0000000..62885e4
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Prefix.py.old
@@ -0,0 +1,113 @@
+'''
+Created on Feb 27, 2012
+
+@author: steger
+'''
+from DataProcessing.DataError import PrefixError
+
class PrefixManager(object):
    '''
    @summary: the unit prefix container
    @note: .py.old snapshot kept for reference next to the Jython-converted
    live copy.
    '''
    class Prefix(object):
        def __init__(self, reference, symbol, base, exponent):
            '''
            @summary: constructor
            @param reference: the reference to the unit prefix
            @type reference: str
            @param symbol: a short form of the unit prefix
            @type symbol: str
            @param base: the base of the unit prefix, typically 2 or 10
            @type base: int
            @param exponent: the exponent of the unit prefix
            @type exponent: int
            '''
            # precompute the multiplier this prefix represents
            scale = base ** exponent
            self._data = (reference, symbol, base, exponent, scale)
        def __str__(self):
            return self.symbol
        @property
        def reference(self):
            return self._data[0]
        @property
        def symbol(self):
            return self._data[1]
        @property
        def base(self):
            return self._data[2]
        @property
        def exponent(self):
            return self._data[3]
        @property
        def scale(self):
            return self._data[4]

    def __init__(self):
        '''
        @summary: constructor
        '''
        # prefixes: reference -> Prefix; duplicatesymbols: symbols registered
        # under more than one reference
        self.prefixes = {}
        self.duplicatesymbols = set()

    def __contains__(self, item):
        '''
        @summary: check the existence of a unit prefix
        @param item: a unit prefix or its symbol
        @type item: Prefix or str
        @return: True if the prefix is known by the PrefixManager
        @rtype: bool
        @raise PrefixError: Wrong item type
        '''
        if isinstance(item, self.Prefix):
            return item in self.prefixes.values()
        elif isinstance(item, str):
            # symbols are not unique, so scan for the first match
            for prefix in self.prefixes.values():
                if prefix.symbol == item:
                    return True
            return False
        else:
            raise PrefixError("Wrong item type %s" % item)

    def __len__(self):
        '''
        @summary: the number of prefixes known by the PrefixManager
        @return: the number of prefixes known by the PrefixManager
        @rtype: int
        '''
        return len(self.prefixes)

    def newPrefix(self, reference, symbol, base, exponent):
        '''
        @summary: generate a new unit prefix
        @param reference: the reference to the unit prefix
        @type reference: str
        @param symbol: a short form of the unit prefix
        @type symbol: str
        @param base: the base of the unit prefix, typically 2 or 10
        @type base: int
        @param exponent: the exponent of the unit prefix
        @type exponent: int
        @return: the new unit prefix
        @rtype: Prefix
        @raise PrefixError: Prefix with reference exists
        '''
        if self.prefixes.has_key(reference):
            raise PrefixError("Prefix with reference %s already exists" % reference)
        # explicit unbound call — presumably to bypass subclass overrides of
        # __contains__; confirm before changing to "symbol in self"
        if PrefixManager.__contains__(self, symbol):
            self.duplicatesymbols.add(symbol)
        prefix = self.Prefix(reference, symbol, base, exponent)
        self.prefixes[reference] = prefix
        return prefix

    def __getitem__(self, reference):
        '''
        @summary: look up the prefix in the PrefixManager based on its reference
        @param reference: the reference to the unit prefix
        @type reference: str
        @return: the unit prefix found
        @rtype: Prefix
        @raise PrefixError: Prefix with reference not found
        '''
        if self.prefixes.has_key(reference):
            return self.prefixes[reference]
        raise PrefixError("Prefix with reference %s not found" % reference)
diff --git a/Monitoring/src/main/python/DataProcessing/Sampler$py.class b/Monitoring/src/main/python/DataProcessing/Sampler$py.class
new file mode 100644
index 0000000..ae1b48b
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Sampler$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/Sampler.py b/Monitoring/src/main/python/DataProcessing/Sampler.py
new file mode 100644
index 0000000..023e8c8
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Sampler.py
@@ -0,0 +1,195 @@
+'''
+Created on Nov 20, 2012
+
+@author: steger
+'''
+from DataProcessing.DataReader import DataReader
+from DataProcessing.DataHeader import DataHeader
+from DataProcessing.DataSource import DataSource
+from DataProcessing.Data import Data
+from DataProcessing.DataError import SamplerError
+
class Sampler(DataSource):
    '''
    @summary: Base class of samplers: a DataSource deriving a new Data table
    from the records of another DataSource.
    @note: subclasses (Head, Tail, Sorter) provide _process() and a name
    property; process() is presumably inherited from DataSource — confirm.
    '''

    def __init__(self, datasource):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @raise SamplerError: wrong type of datasource
        '''
        if not isinstance(datasource, DataSource):
            raise SamplerError("Wrong type of datasource %s" % datasource)
        DataSource.__init__(self, datasource)
        # read the source table and replicate its header for the sample table
        self._reader = DataReader(datasource = datasource._data)
        header = DataHeader("%sSample(%s)" % (self.name, self.source.name))
        for c in self._reader.headercells():
            header.addColumn(c)
        self._data = Data(self.um, header)

    def _get_header(self):
        return self._data.header

    def __len__(self):
        # refresh the sample before reporting its size
        self.process()
        return len(self._data)

    def __getitem__(self, k):
        return self._data._rawrecords.__getitem__(k)



    header = property(_get_header,None,None)
class Head(Sampler):
    '''
    @summary: Sampler keeping the first n records of the source table.
    '''
    def __init__(self, datasource, head = 10):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param head: the top n records of the table
        @type head: int
        '''
        Sampler.__init__(self, datasource)
        self._head = head

    def _get_name(self):
        return "Head"

    def _get_head(self):
        return self._head
    def _set_head(self, head):
        # changing the sample size invalidates the current sample
        self._head = int(head)
        self._data.clear()
        self._reader.rewind()

    def _process(self):
        # restart from scratch if the source table was cleared meanwhile
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            self._reader.rewind()
            self._data.clear()
        if len(self._data) == self.head:
            # sample already complete
            return
        for x in self._reader:
            self._data._rawrecords.append(x)
            if len(self._data) == self.head:
                self._data._onexpand()
                return
        raise SamplerError("Not enough sample %d/%d" % (len(self._data), self.head))


    head = property(_get_head,_set_head,None)

    name = property(_get_name,None,None)
class Tail(Sampler):
    '''
    @summary: Sampler keeping the last n records of the source table.
    '''
    def __init__(self, datasource, tail = 10):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param tail: the last n records of the table
        @type tail: int
        '''
        Sampler.__init__(self, datasource)
        self._tail = tail

    def _get_name(self):
        return "Tail"

    def _get_tail(self):
        return self._tail
    def _set_tail(self, tail):
        # changing the sample size invalidates the current sample
        self._tail = int(tail)
        self._data.clear()
        self._reader.rewind()

    def _process(self):
        clear = False
        # restart from scratch if the source table was cleared meanwhile
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            self._reader.rewind()
            self._data.clear()
        for x in self._reader:
            if len(self._data) == self.tail:
                # sliding window: drop the oldest record before appending
                self._data._rawrecords.pop(0)
                clear = True
            self._data._rawrecords.append(x)
        if clear:
            self._data._onclear()
        if len(self._data) == self.tail:
            self._data._onexpand()
        else:
            raise SamplerError("Not enough sample %d/%d" % (len(self._data), self.tail))


    tail = property(_get_tail,_set_tail,None)

    name = property(_get_name,None,None)
class Sorter(Sampler):
    '''
    @summary: Sampler producing a copy of the source table sorted by one column.
    '''
    def __init__(self, datasource, keycell = None, ascending = True):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param keycell: the key column to use for sorting
        @type keycell: CellRequest or None
        @param ascending: indicate the sorting order
        @type ascending: bool
        '''
        Sampler.__init__(self, datasource)
        self._asc = ascending
        self._key = 0
        if keycell:
            self.keycell = keycell

    def _get_name(self):
        return "Sort"

    def _get_ascending(self):
        return self._asc
    def _set_ascending(self, ascending):
        # changing the order invalidates the current sample
        if bool(ascending) != self._asc:
            self._asc = bool(ascending)
            self._data.clear()
            self._reader.rewind()

    def _get_key(self):
        return self._key
    def _set_key(self, key):
        self._key = int(key)

    def _get_keycell(self):
        # write-only property: the request is resolved to a column index
        raise SamplerError("don't read this property")
    def _set_keycell(self, cellrequest):
        # resolve the cell request to the index of the first matching column
        for idx, _ in self.source.header.getCell(cellrequest):
            self._key = idx
            break

    def _process(self):
        clear = False
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            clear = True
        if self._reader.sourceExpanded.isSet():
            self._reader.sourceExpanded.clear()
            clear = True
        if not clear:
            # nothing changed in the source: the sorted copy is up to date
            return
        self._reader.rewind()
        self._data.clear()
        self._data._rawrecords = sorted(self._reader.source._rawrecords, key=lambda r: r[self.key], reverse = not self.ascending)
        if len(self._data):
            self._data._onclear()
            self._data._onexpand()
        else:
            raise SamplerError("Not enough sample...")


    keycell = property(_get_keycell,_set_keycell,None)

    name = property(_get_name,None,None)

    key = property(_get_key,_set_key,None)

    ascending = property(_get_ascending,_set_ascending,None)
diff --git a/Monitoring/src/main/python/DataProcessing/Sampler.py.old b/Monitoring/src/main/python/DataProcessing/Sampler.py.old
new file mode 100644
index 0000000..f1c0622
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Sampler.py.old
@@ -0,0 +1,191 @@
+'''
+Created on Nov 20, 2012
+
+@author: steger
+'''
+from DataProcessing.DataReader import DataReader
+from DataProcessing.DataHeader import DataHeader
+from DataProcessing.DataSource import DataSource
+from DataProcessing.Data import Data
+from DataProcessing.DataError import SamplerError
+
class Sampler(DataSource):
    '''
    @summary: Base class of samplers: a DataSource deriving a new Data table
    from the records of another DataSource.
    @note: .py.old snapshot kept for reference next to the live copy.
    '''

    def __init__(self, datasource):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @raise SamplerError: wrong type of datasource
        '''
        if not isinstance(datasource, DataSource):
            raise SamplerError("Wrong type of datasource %s" % datasource)
        DataSource.__init__(self, datasource)
        # read the source table and replicate its header for the sample table
        self._reader = DataReader(datasource = datasource._data)
        header = DataHeader("%sSample(%s)" % (self.name, self.source.name))
        for c in self._reader.headercells():
            header.addColumn(c)
        self._data = Data(self.um, header)

    @property
    def header(self):
        return self._data.header

    def __len__(self):
        # refresh the sample before reporting its size
        self.process()
        return len(self._data)

    def __getitem__(self, k):
        return self._data._rawrecords.__getitem__(k)
+
+
class Head(Sampler):
    '''
    @summary: Sampler keeping the first n records of the source table.
    '''
    def __init__(self, datasource, head = 10):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param head: the top n records of the table
        @type head: int
        '''
        Sampler.__init__(self, datasource)
        self._head = head

    @property
    def name(self):
        return "Head"

    @property
    def head(self):
        return self._head
    @head.setter
    def head(self, head):
        # changing the sample size invalidates the current sample
        self._head = int(head)
        self._data.clear()
        self._reader.rewind()

    def _process(self):
        # restart from scratch if the source table was cleared meanwhile
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            self._reader.rewind()
            self._data.clear()
        if len(self._data) == self.head:
            # sample already complete
            return
        for x in self._reader:
            self._data._rawrecords.append(x)
            if len(self._data) == self.head:
                self._data._onexpand()
                return
        raise SamplerError("Not enough sample %d/%d" % (len(self._data), self.head))
+
class Tail(Sampler):
    '''
    @summary: Sampler keeping the last n records of the source table.
    '''
    def __init__(self, datasource, tail = 10):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param tail: the last n records of the table
        @type tail: int
        '''
        Sampler.__init__(self, datasource)
        self._tail = tail

    @property
    def name(self):
        return "Tail"

    @property
    def tail(self):
        return self._tail
    @tail.setter
    def tail(self, tail):
        # changing the sample size invalidates the current sample
        self._tail = int(tail)
        self._data.clear()
        self._reader.rewind()

    def _process(self):
        clear = False
        # restart from scratch if the source table was cleared meanwhile
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            self._reader.rewind()
            self._data.clear()
        for x in self._reader:
            if len(self._data) == self.tail:
                # sliding window: drop the oldest record before appending
                self._data._rawrecords.pop(0)
                clear = True
            self._data._rawrecords.append(x)
        if clear:
            self._data._onclear()
        if len(self._data) == self.tail:
            self._data._onexpand()
        else:
            raise SamplerError("Not enough sample %d/%d" % (len(self._data), self.tail))
+
class Sorter(Sampler):
    '''
    @summary: Sampler producing a copy of the source table sorted by one column.
    '''
    def __init__(self, datasource, keycell = None, ascending = True):
        '''
        Constructor
        @param datasource: table of records to manipulate with
        @type datasource: DataSource
        @param keycell: the key column to use for sorting
        @type keycell: CellRequest or None
        @param ascending: indicate the sorting order
        @type ascending: bool
        '''
        Sampler.__init__(self, datasource)
        self._asc = ascending
        self._key = 0
        if keycell:
            self.keycell = keycell

    @property
    def name(self):
        return "Sort"

    @property
    def ascending(self):
        return self._asc
    @ascending.setter
    def ascending(self, ascending):
        # changing the order invalidates the current sample
        if bool(ascending) != self._asc:
            self._asc = bool(ascending)
            self._data.clear()
            self._reader.rewind()

    @property
    def key(self):
        return self._key
    @key.setter
    def key(self, key):
        self._key = int(key)

    @property
    def keycell(self):
        # write-only property: the request is resolved to a column index
        raise SamplerError("don't read this property")
    @keycell.setter
    def keycell(self, cellrequest):
        # resolve the cell request to the index of the first matching column
        for idx, _ in self.source.header.getCell(cellrequest):
            self._key = idx
            break

    def _process(self):
        clear = False
        if self._reader.sourceCleared.isSet():
            self._reader.sourceCleared.clear()
            clear = True
        if self._reader.sourceExpanded.isSet():
            self._reader.sourceExpanded.clear()
            clear = True
        if not clear:
            # nothing changed in the source: the sorted copy is up to date
            return
        self._reader.rewind()
        self._data.clear()
        self._data._rawrecords = sorted(self._reader.source._rawrecords, key=lambda r: r[self.key], reverse = not self.ascending)
        if len(self._data):
            self._data._onclear()
            self._data._onexpand()
        else:
            raise SamplerError("Not enough sample...")
+
diff --git a/Monitoring/src/main/python/DataProcessing/Unit$py.class b/Monitoring/src/main/python/DataProcessing/Unit$py.class
new file mode 100644
index 0000000..23036eb
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Unit$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/Unit.py b/Monitoring/src/main/python/DataProcessing/Unit.py
new file mode 100644
index 0000000..7c010ac
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Unit.py
@@ -0,0 +1,298 @@
+from __future__ import with_statement
+'''
+Created on Oct 19, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+from DataProcessing.DataError import UnitError
+
+class UnitManager(object):
+ '''
+ @summary: the unit container
+ The web that describes the derivation path between different units is not stored,
+ because most of that information can be inferred from the dimension derivations.
+ The UnitManager differentiates between BasicUnit and DerivedUnit.
+ BasicUnits form the set of BaseUnit, ProductUnit, PowerUnit as referred to in the information model.
+ Whereas DerivedUnits are made up of LinearTransformedUnit and RegexpScaledUnit as referenced in the information model.
+ Units that are formed by prepending a unit prefix are also DerivedUnits.
+ '''
+
+ class Unit(object):
+ def __init__(self, manager, reference, symbol, ancestor):
+ self._data = (manager, reference, symbol)
+ self._ancestor = ancestor
+ def _get_manager(self):
+ return self._data[0]
+ def _get_reference(self):
+ return self._data[1]
+ def _get_symbol(self):
+ return self._data[2]
+ def __str__(self):
+ return self.symbol
+ def __eq__(self, u):
+ return self._data == u._data
+
+
+ manager = property(_get_manager,None,None)
+
+ symbol = property(_get_symbol,None,None)
+
+ reference = property(_get_reference,None,None)
+ class BasicUnit(Unit):
+ def __init__(self, manager, reference, symbol):
+ '''
+ @summary: constructor
+ A BasicUnit is an instance of either set of BaseUnit, ProductUnit and PowerUnit as of the information model.
+ @param manager: a reference to the unit manager
+ @type manager: UnitManager
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: an abbreviation for the unit
+ @type symbol: str
+ '''
+ UnitManager.Unit.__init__(self, manager, reference, symbol, None)
+
+ class DerivedUnit(Unit):
+ def __init__(self, manager, reference, symbol, ancestor):
+ '''
+ @summary: constructor
+ A DerivedUnit is an instance of either set of LinearTransformedUnit and RegexpScaledUnit as of the information model.
+ Also units that have any unit prefix fall in this set.
+ @param manager: a reference to the unit manager
+ @type manager: UnitManager
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: an abbreviation for the unit
+ @type symbol: str
+ @param ancestor: the neighbor unit, whose derivative this instance is.
+ @type ancestor: Unit
+ '''
+ UnitManager.Unit.__init__(self, manager, reference, symbol, ancestor)
+
+
+ def __init__(self):
+ '''
+ @summary: constructor
+ '''
+ self.units = {}
+ self.conversionpaths = {}
+ self.basins = {}
+ self.duplicatesymbols = set()
+
+ def __contains__(self, item):
+ '''
+ @summary: check the existence of a unit
+ @param item: a unit or its symbol
+ @type item: Unit or str
+ @return: True if the unit is known by the UnitManager
+ @rtype: bool
+ @raise UnitError: Wrong item type
+ '''
+ units = set(self.units.values())
+ if isinstance(item, self.Unit):
+ return item in units
+ elif isinstance(item, str):
+ for unit in units:
+ if unit.symbol == item:
+ return True
+ return False
+ else:
+ raise UnitError("Wrong item type %s" % item)
+
+ def __len__(self):
+ '''
+ @summary: the number of units known by the UnitManager
+ @return: the number of units known by the UnitManager
+ @rtype: int
+ '''
+ return len(self.units)
+
+ @staticmethod
+ def intORfloat(x):
+ '''
+ @summary: a conversion helper to read out a value as a number
+ @param x: a number
+ @type x: str
+ @return: the number converted to integer or floating point decimal
+ @rtype: int or float
+ '''
+ if isinstance(x, str):
+ try:
+ return int(x)
+ except ValueError:
+ return float(x)
+ else:
+ return float(x)
+
+ def __getitem__(self, reference):
+ '''
+ @summary: look up the unit in the UnitManager using its reference
+ @param reference: the reference to the unit
+ @type reference: str
+ @return: the unit found
+ @rtype: Unit
+ @raise UnitError: Unit with reference not found
+ '''
+ if self.units.has_key(reference):
+ return self.units[reference]
+ raise UnitError("Unit with reference %s not found" % reference)
+
+ def newBasicUnit(self, reference, symbol):
+ '''
+ @summary: generate a new basic unit
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: a short form of the unit
+ @type symbol: str
+ @return: the new unit
+ @rtype: BasicUnit
+ @raise UnitError: Unit with reference exists
+ '''
+ if self.units.has_key(reference):
+ raise UnitError("Unit with reference %s exists" % reference)
+ if UnitManager.__contains__(self, symbol):
+ self.duplicatesymbols.add(symbol)
+ unit = self.BasicUnit(self, reference, symbol)
+ self.units[reference] = unit
+ self.basins[unit] = set([unit])
+ self.__dict__[reference] = unit
+ return unit
+
+ def addLinearTransformedUnit(self, reference, symbol, derivedfrom, scale, offset = 0):
+ '''
+ @summary: generate a derived unit via a linear transformation
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: a short form of the unit
+ @type symbol: str
+ @param derivedfrom: the neighbor unit
+ @type derivedfrom: Unit
+ @param scale: scaling factor for the linear transformation
+ @type scale: float
+ @param offset: the shift in the linear transformation, defaults to 0
+ @type offset: float
+ @return: the new unit
+ @rtype: DerivedUnit
+ @raise UnitError: Wrong type of derivedfrom / Unit not found / Unit with reference exists / Cannot extend basin with unit, because Unit not found
+ '''
+ if not isinstance(derivedfrom, self.Unit):
+ raise UnitError("Wrong type of derivedfrom %s" % derivedfrom)
+ if not UnitManager.__contains__(self, str(derivedfrom)):
+ raise UnitError("Unit %s not found" % derivedfrom)
+ if self.units.has_key(reference):
+ raise UnitError("Unit with reference %s exists" % reference)
+ unit = self.DerivedUnit(self, reference, symbol, derivedfrom)
+ basic = derivedfrom
+ while basic._ancestor:
+ basic = basic._ancestor
+ if not self.basins.has_key(basic):
+ raise UnitError("Cannot extend basin with unit %s, because Unit %s not found" % (unit, basic))
+ if UnitManager.__contains__(self, symbol):
+ self.duplicatesymbols.add(symbol)
+ self.units[reference] = unit
+ self.conversionpaths[(unit, derivedfrom)] = (self.op_lt_forward, (scale, offset))
+ self.conversionpaths[(derivedfrom, unit)] = (self.op_lt_inverse, (scale, offset))
+ self.basins[basic].add(unit)
+ self.__dict__[reference] = unit
+ return unit
+
+ def addRegexpTransformedUnit(self, reference, symbol, derivedfrom, expr_forward, expr_inverse):
+ '''
+ @summary: generate a derived unit via a regexp transformation
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: a short form of the unit
+ @type symbol: str
+ @param derivedfrom: the neighbor unit
+ @type derivedfrom: Unit
+ @param expr_forward: the expression driving the forward transformation
+ @type expr_forward: str
+ @param expr_inverse: the expression driving the inverse transformation
+ @type expr_inverse: str
+ @return: the new unit
+ @rtype: DerivedUnit
+ @raise UnitError: Wrong type of derivedfrom / Unit not found / Unit with reference exists / Cannot extend basin with unit, because Unit not found
+ '''
+ if not isinstance(derivedfrom, self.Unit):
+ raise UnitError("Wrong type of derivedfrom %s" % derivedfrom)
+ if not UnitManager.__contains__(self, str(derivedfrom)):
+ raise UnitError("Unit %s not found" % derivedfrom)
+ if self.units.has_key(reference):
+ raise UnitError("Unit with reference %s exists" % reference)
+ unit = self.DerivedUnit(self, reference, symbol, derivedfrom)
+ basic = derivedfrom
+ while basic._ancestor:
+ basic = basic._ancestor
+ if not self.basins.has_key(basic):
+ raise UnitError("Cannot extend basin with unit %s, because Unit %s not found" % (unit, basic))
+ if UnitManager.__contains__(self, symbol):
+ self.duplicatesymbols.add(symbol)
+ self.units[reference] = unit
+ self.conversionpaths[(unit, derivedfrom)] = (self.op_rt_forward, expr_forward)
+ self.conversionpaths[(derivedfrom, unit)] = (self.op_rt_inverse, expr_inverse)
+ self.basins[basic].add(unit)
+ self.__dict__[reference] = unit
+ return unit
+
+ def getBasinByUnit(self, unit):
+ for basin in self.basins.values():
+ if unit in basin:
+ return basin
+ raise UnitError("Basin for unit %s not found" % unit)
+
+ def getBasinByReference(self, reference):
+ try:
+ unit = self[reference]
+ return self.getBasinByUnit(unit)
+ except UnitError:
+ raise UnitError("Basin for unit reference %s not found" % reference)
+
+ def op_lt_forward(self, value, (scale, offset)):
+ def op(value):
+ return scale * self.intORfloat( value ) + offset
+ if isinstance(value, list):
+ return map(lambda x: op(x), value)
+ return op(value)
+
+ def op_lt_inverse(self, value, (scale, offset)):
+ def op(value):
+ return (self.intORfloat( value ) - offset) / float(scale)
+ if isinstance(value, list):
+ return map(lambda x: op(x), value)
+ return op(value)
+
+ def op_rt_forward(self, value, expression):
+ def op(value):
+ raise UnitError("not implemented")
+ if isinstance(value, list):
+ return map(lambda x: op(x), value)
+ return op(value)
+
+ op_rt_inverse = op_rt_forward
+
+ def convert(self, value, from_unit, to_unit):
+ if not UnitManager.__contains__(self, str(from_unit)):
+ raise UnitError("Unknown from_unit")
+ if not UnitManager.__contains__(self, str(to_unit)):
+ raise UnitError("Unknown to_unit")
+ if from_unit == to_unit:
+ return value
+
+ while from_unit._ancestor:
+ op, oparg = self.conversionpaths[(from_unit, from_unit._ancestor)]
+ value = op(value, oparg)
+ from_unit = from_unit._ancestor
+ heap = []
+ while to_unit._ancestor:
+ op, oparg = self.conversionpaths[(to_unit._ancestor, to_unit)]
+ heap.append((op, oparg))
+ to_unit = to_unit._ancestor
+ if from_unit != to_unit:
+ raise UnitError("Different base units %s %s" % (from_unit, to_unit))
+ while len(heap):
+ op, oparg = heap.pop(0)
+ value = op(value, oparg)
+ return value
+
diff --git a/Monitoring/src/main/python/DataProcessing/Unit.py.old b/Monitoring/src/main/python/DataProcessing/Unit.py.old
new file mode 100644
index 0000000..87a9836
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/Unit.py.old
@@ -0,0 +1,294 @@
+'''
+Created on Oct 19, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+from DataProcessing.DataError import UnitError
+
+class UnitManager(object):
+ '''
+ @summary: the unit container
+ The web that describes the derivation path between different units is not stored,
+ because most of that information can be inferred from the dimension derivations.
+ The UnitManager differentiates between BasicUnit and DerivedUnit.
+ BasicUnits form the set of BaseUnit, ProductUnit, PowerUnit as referred to in the information model.
+ Whereas DerivedUnits are made up of LinearTransformedUnit and RegexpScaledUnit as referenced in the information model.
+ Units that are formed by prepending a unit prefix are also DerivedUnits.
+ '''
+
+ class Unit(object):
+ def __init__(self, manager, reference, symbol, ancestor):
+ self._data = (manager, reference, symbol)
+ self._ancestor = ancestor
+ @property
+ def manager(self):
+ return self._data[0]
+ @property
+ def reference(self):
+ return self._data[1]
+ @property
+ def symbol(self):
+ return self._data[2]
+ def __str__(self):
+ return self.symbol
+ def __eq__(self, u):
+ return self._data == u._data
+
+ class BasicUnit(Unit):
+ def __init__(self, manager, reference, symbol):
+ '''
+ @summary: constructor
+ A BasicUnit is an instance of either set of BaseUnit, ProductUnit and PowerUnit as of the information model.
+ @param manager: a reference to the unit manager
+ @type manager: UnitManager
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: an abbreviation for the unit
+ @type symbol: str
+ '''
+ UnitManager.Unit.__init__(self, manager, reference, symbol, None)
+
+ class DerivedUnit(Unit):
+ def __init__(self, manager, reference, symbol, ancestor):
+ '''
+ @summary: constructor
+ A DerivedUnit is an instance of either set of LinearTransformedUnit and RegexpScaledUnit as of the information model.
+ Also units that have any unit prefix fall in this set.
+ @param manager: a reference to the unit manager
+ @type manager: UnitManager
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: an abbreviation for the unit
+ @type symbol: str
+ @param ancestor: the neighbor unit, whose derivative this instance is.
+ @type ancestor: Unit
+ '''
+ UnitManager.Unit.__init__(self, manager, reference, symbol, ancestor)
+
+
+ def __init__(self):
+ '''
+ @summary: constructor
+ '''
+ self.units = {}
+ self.conversionpaths = {}
+ self.basins = {}
+ self.duplicatesymbols = set()
+
+ def __contains__(self, item):
+ '''
+ @summary: check the existence of a unit
+ @param item: a unit or its symbol
+ @type item: Unit or str
+ @return: True if the unit is known by the UnitManager
+ @rtype: bool
+ @raise UnitError: Wrong item type
+ '''
+ units = set(self.units.values())
+ if isinstance(item, self.Unit):
+ return item in units
+ elif isinstance(item, str):
+ for unit in units:
+ if unit.symbol == item:
+ return True
+ return False
+ else:
+ raise UnitError("Wrong item type %s" % item)
+
+ def __len__(self):
+ '''
+ @summary: the number of units known by the UnitManager
+ @return: the number of units known by the UnitManager
+ @rtype: int
+ '''
+ return len(self.units)
+
+ @staticmethod
+ def intORfloat(x):
+ '''
+ @summary: a conversion helper to read out a value as a number
+ @param x: a number
+ @type x: str
+ @return: the number converted to integer or floating point decimal
+ @rtype: int or float
+ '''
+ if isinstance(x, str):
+ try:
+ return int(x)
+ except ValueError:
+ return float(x)
+ else:
+ return float(x)
+
+ def __getitem__(self, reference):
+ '''
+ @summary: look up the unit in the UnitManager using its reference
+ @param reference: the reference to the unit
+ @type reference: str
+ @return: the unit found
+ @rtype: Unit
+ @raise UnitError: Unit with reference not found
+ '''
+ if self.units.has_key(reference):
+ return self.units[reference]
+ raise UnitError("Unit with reference %s not found" % reference)
+
+ def newBasicUnit(self, reference, symbol):
+ '''
+ @summary: generate a new basic unit
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: a short form of the unit
+ @type symbol: str
+ @return: the new unit
+ @rtype: BasicUnit
+ @raise UnitError: Unit with reference exists
+ '''
+ if self.units.has_key(reference):
+ raise UnitError("Unit with reference %s exists" % reference)
+ if UnitManager.__contains__(self, symbol):
+ self.duplicatesymbols.add(symbol)
+ unit = self.BasicUnit(self, reference, symbol)
+ self.units[reference] = unit
+ self.basins[unit] = set([unit])
+ self.__dict__[reference] = unit
+ return unit
+
+ def addLinearTransformedUnit(self, reference, symbol, derivedfrom, scale, offset = 0):
+ '''
+ @summary: generate a derived unit via a linear transformation
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: a short form of the unit
+ @type symbol: str
+ @param derivedfrom: the neighbor unit
+ @type derivedfrom: Unit
+ @param scale: scaling factor for the linear transformation
+ @type scale: float
+ @param offset: the shift in the linear transformation, defaults to 0
+ @type offset: float
+ @return: the new unit
+ @rtype: DerivedUnit
+ @raise UnitError: Wrong type of derivedfrom / Unit not found / Unit with reference exists / Cannot extend basin with unit, because Unit not found
+ '''
+ if not isinstance(derivedfrom, self.Unit):
+ raise UnitError("Wrong type of derivedfrom %s" % derivedfrom)
+ if not UnitManager.__contains__(self, str(derivedfrom)):
+ raise UnitError("Unit %s not found" % derivedfrom)
+ if self.units.has_key(reference):
+ raise UnitError("Unit with reference %s exists" % reference)
+ unit = self.DerivedUnit(self, reference, symbol, derivedfrom)
+ basic = derivedfrom
+ while basic._ancestor:
+ basic = basic._ancestor
+ if not self.basins.has_key(basic):
+ raise UnitError("Cannot extend basin with unit %s, because Unit %s not found" % (unit, basic))
+ if UnitManager.__contains__(self, symbol):
+ self.duplicatesymbols.add(symbol)
+ self.units[reference] = unit
+ self.conversionpaths[(unit, derivedfrom)] = (self.op_lt_forward, (scale, offset))
+ self.conversionpaths[(derivedfrom, unit)] = (self.op_lt_inverse, (scale, offset))
+ self.basins[basic].add(unit)
+ self.__dict__[reference] = unit
+ return unit
+
+ def addRegexpTransformedUnit(self, reference, symbol, derivedfrom, expr_forward, expr_inverse):
+ '''
+ @summary: generate a derived unit via a regexp transformation
+ @param reference: the reference to the unit
+ @type reference: str
+ @param symbol: a short form of the unit
+ @type symbol: str
+ @param derivedfrom: the neighbor unit
+ @type derivedfrom: Unit
+ @param expr_forward: the expression driving the forward transformation
+ @type expr_forward: str
+ @param expr_inverse: the expression driving the inverse transformation
+ @type expr_inverse: str
+ @return: the new unit
+ @rtype: DerivedUnit
+ @raise UnitError: Wrong type of derivedfrom / Unit not found / Unit with reference exists / Cannot extend basin with unit, because Unit not found
+ '''
+ if not isinstance(derivedfrom, self.Unit):
+ raise UnitError("Wrong type of derivedfrom %s" % derivedfrom)
+ if not UnitManager.__contains__(self, str(derivedfrom)):
+ raise UnitError("Unit %s not found" % derivedfrom)
+ if self.units.has_key(reference):
+ raise UnitError("Unit with reference %s exists" % reference)
+ unit = self.DerivedUnit(self, reference, symbol, derivedfrom)
+ basic = derivedfrom
+ while basic._ancestor:
+ basic = basic._ancestor
+ if not self.basins.has_key(basic):
+ raise UnitError("Cannot extend basin with unit %s, because Unit %s not found" % (unit, basic))
+ if UnitManager.__contains__(self, symbol):
+ self.duplicatesymbols.add(symbol)
+ self.units[reference] = unit
+ self.conversionpaths[(unit, derivedfrom)] = (self.op_rt_forward, expr_forward)
+ self.conversionpaths[(derivedfrom, unit)] = (self.op_rt_inverse, expr_inverse)
+ self.basins[basic].add(unit)
+ self.__dict__[reference] = unit
+ return unit
+
+ def getBasinByUnit(self, unit):
+ for basin in self.basins.values():
+ if unit in basin:
+ return basin
+ raise UnitError("Basin for unit %s not found" % unit)
+
+ def getBasinByReference(self, reference):
+ try:
+ unit = self[reference]
+ return self.getBasinByUnit(unit)
+ except UnitError:
+ raise UnitError("Basin for unit reference %s not found" % reference)
+
+ def op_lt_forward(self, value, (scale, offset)):
+ def op(value):
+ return scale * self.intORfloat( value ) + offset
+ if isinstance(value, list):
+ return map(lambda x: op(x), value)
+ return op(value)
+
+ def op_lt_inverse(self, value, (scale, offset)):
+ def op(value):
+ return (self.intORfloat( value ) - offset) / float(scale)
+ if isinstance(value, list):
+ return map(lambda x: op(x), value)
+ return op(value)
+
+ def op_rt_forward(self, value, expression):
+ def op(value):
+ raise UnitError("not implemented")
+ if isinstance(value, list):
+ return map(lambda x: op(x), value)
+ return op(value)
+
+ op_rt_inverse = op_rt_forward
+
+ def convert(self, value, from_unit, to_unit):
+ if not UnitManager.__contains__(self, str(from_unit)):
+ raise UnitError("Unknown from_unit")
+ if not UnitManager.__contains__(self, str(to_unit)):
+ raise UnitError("Unknown to_unit")
+ if from_unit == to_unit:
+ return value
+
+ while from_unit._ancestor:
+ op, oparg = self.conversionpaths[(from_unit, from_unit._ancestor)]
+ value = op(value, oparg)
+ from_unit = from_unit._ancestor
+ heap = []
+ while to_unit._ancestor:
+ op, oparg = self.conversionpaths[(to_unit._ancestor, to_unit)]
+ heap.append((op, oparg))
+ to_unit = to_unit._ancestor
+ if from_unit != to_unit:
+ raise UnitError("Different base units %s %s" % (from_unit, to_unit))
+ while len(heap):
+ op, oparg = heap.pop(0)
+ value = op(value, oparg)
+ return value
+
diff --git a/Monitoring/src/main/python/DataProcessing/__init__$py.class b/Monitoring/src/main/python/DataProcessing/__init__$py.class
new file mode 100644
index 0000000..e6b553c
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/DataProcessing/__init__.py b/Monitoring/src/main/python/DataProcessing/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/__init__.py
diff --git a/Monitoring/src/main/python/DataProcessing/__init__.py.old b/Monitoring/src/main/python/DataProcessing/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/__init__.py.old
diff --git a/Monitoring/src/main/python/DataProcessing/test.py b/Monitoring/src/main/python/DataProcessing/test.py
new file mode 100644
index 0000000..3cecc23
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/test.py
@@ -0,0 +1,372 @@
+'''
+Created on Sep 2, 2011
+
+@author: steger
+'''
+
+import site
+site.addsitedir('../site-packages')
+
+import unittest2
+from DataProcessing.Parameter import Parameter
+from random import randint
+from Example.Prefixes import prefixes, PM
+from Example.Dimensions import DM, timeinterval, countable, ipaddress, cardinal
+from Example.Units import UM, milli_second, pico_second, dozen, micro_second,\
+ piece, nano_second, second
+from DataProcessing.Data import Data
+from DataProcessing.Aggregator import Aggregator, AggregatorError, Sum, Max, Min,\
+ Mean, Deviation, Percentile
+from DataProcessing.DataReader import DataReader
+from DataProcessing.DataFormatter import JsonFormatter, DumbFormatter
+from DataProcessing.Sampler import Head, SamplerError, Tail, Sorter
+from DataProcessing.AggregatorManager import AggregatorManager
+from DataProcessing.DataHeader import DataHeaderGeneratedByDescription,\
+ DataHeader
+from DataProcessing.DataHeaderCell import DataHeaderCell, CellRequestByName,\
+ CellRequestByFeature
+from DataProcessing.DataError import DataError
+
+
+class Test(unittest2.TestCase):
+ eps = 1e-15
+
+ def different(self, expect, got):
+ return abs(expect - got) / float(expect) < self.eps
+
+ def setUp(self):
+ pass
+
+ def test_PM(self):
+ for ref, symbol, base, exponent in prefixes:
+ scale = base ** exponent
+ p = PM[ref]
+ self.assertEqual(str(p), symbol, "symbol cannot be read back %s %s" % (p, symbol))
+ self.assertEqual(p.scale, scale, "prefix %s scale error got: %f expect: %f" % (p, p.scale, scale))
+
+ self.assertTrue('p' in PM, "cannot find symbol")
+ self.assertFalse('pico' in PM, "found a symbol, which I shouldn't")
+
+
+ def test_UM(self):
+ s = randint(1, 10000)
+ expect = s * 1e-3
+ got = UM.convert(s, milli_second, second)
+ self.assertTrue(self.different(expect, got), "Different (%d ms) expect %f s got %f s" % (s, expect, got))
+ expect = s * 1e9
+ got = UM.convert(s, milli_second, pico_second)
+ self.assertTrue(self.different(expect, got), "Different (%d ms) expect %f ps got %f ps" % (s, expect, got))
+
+ kilobit = UM["kilo_bit"]
+ megaByte = UM["mega_Byte"]
+ b = randint(1, 1000)
+ expect = b * 1e-3 / 8.
+ got = UM.convert(b, kilobit, megaByte)
+ self.assertTrue(self.different(expect, got), "Different (%d kbit) expect %f MB got %f MB" % (b, expect, got))
+
+ def test_D(self):
+ dim = DM['TimeInterval']
+ for u in [second, milli_second]:
+ self.assertTrue(dim.containsUnit(u), "piece %s not in dim" % u)
+ bu = UM.getBasinByUnit(UM['second'])
+ br = UM.getBasinByReference('micro_second')
+ self.assertTrue(bu == br, "basins differ")
+
+ def test_parameter(self):
+ n = randint(0, 1000)
+ parameter = Parameter(name = 'testparameter', valuetype = float, unitmanager = UM, dimension = countable, default = (n, dozen))
+ v1 = 12 * parameter.value[0]
+ v2 = parameter.convert(piece)
+ self.assertTrue(abs(v1 - v2) < self.eps, "%d dozen and %d are not equal (type 1)" % (n, v2))
+ n = randint(0, 1000)
+ parameter.value = (n, piece)
+ v = parameter.convert(dozen)
+ self.assertTrue(abs(12 * v - n) < self.eps, "%f dozen and %d are not equal (type 2)" % (v, n))
+
+ def test_addcolumn(self):
+ '''
+ '''
+ c1 = DataHeaderCell(name = "oszlop", dimension = timeinterval, unit = milli_second)
+ c2 = DataHeaderCell(name = "oszlop2", dimension = timeinterval, unit = second, feature = "kutyafule")
+ h = DataHeader(name = "proba")
+ h.addColumn(c1)
+ h.addColumn(c2)
+ self.assertRaises(DataError, h.addColumn, c1)
+ cr1 = CellRequestByName(name = "oszlop2")
+ cr2 = CellRequestByFeature(feature = "kutyafule")
+ qr1 = [ x for x in h.getCell(cellrequest = cr1) ]
+ qr2 = [ x for x in h.getCell(cellrequest = cr2) ]
+ self.assertEqual(qr1, qr2, "getCell oopses 1")
+ qr = [ x for x in h.getCell(cellrequest = CellRequestByFeature(feature = "macskanyelv")) ]
+ self.assertEqual(len(qr), 0, "getCell oopses 2")
+
+
+ def test_createheadertemplate(self):
+ header = DataHeader(name = "traceroute")
+ cell = DataHeaderCell(name = "idx", dimension = cardinal)
+ header.addColumn(cell)
+ iphdr = DataHeader(name = "info")
+ cell = DataHeaderCell(name = "address", dimension = ipaddress)
+ iphdr.addColumn(cell)
+ rtthdr = DataHeader(name = "rttinfo")
+ cell = DataHeaderCell(name = "roundtripdelay", dimension = timeinterval, unit = milli_second)
+ rtthdr.addColumn(cell)
+ iphdr.addColumn(rtthdr)
+ header.addColumn(iphdr)
+ header2 = DataHeaderGeneratedByDescription("traceroute", [('idx', cardinal), ("info", [('address', ipaddress), ("rttinfo", [('roundtripdelay', timeinterval, milli_second)])])])
+ self.assertTrue(header == header2, "headers differ:\n%s\n%s" % (header, header2))
+
+ def test_complex_table(self):
+ '''
+ '''
+ header = DataHeaderGeneratedByDescription("traceroute", [('idx', cardinal), ("info", [('address', ipaddress), ("rttinfo", [('roundtripdelay', timeinterval, milli_second)])])])
+
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = 2)
+ iprec1, iprec2 = hoprecord.getRecordTemplates(name = "info")
+ (rttrec1,) = iprec1.getRecordTemplates(name = "rttinfo", sizes = [3,])
+ (rttrec2,) = iprec2.getRecordTemplates(name = "rttinfo", sizes = [3,])
+
+ rttrec1.update(name = 'roundtripdelay', values = [2.925, 3.279, 3.758], unit = milli_second)
+ iprec1.update(name = 'address', values = ['192.168.1.1'])
+
+ rttrec2.update(name = 'roundtripdelay', values = [.008634, .008857, .009054], unit = second)
+ iprec2.update(name = 'address', values = ['157.181.172.126'])
+
+ hoprecord.update(name = 'idx', values = [1,2])
+
+ D.saveRecord(hoprecord)
+
+ def test_iteratorNextractor(self):
+ N = 1000
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('RoundTripDelay', timeinterval, milli_second)])
+ milli = map(lambda x: randint(1, 100000), range(N))
+ micro = map(lambda x: 1000*x, milli)
+ nano = map(lambda x: 1000000*x, milli)
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = N)
+ hoprecord.update(name = 'RoundTripDelay', values = milli, unit = milli_second)
+ hoprecord.update(name = 'idx', values = range(N))
+ D.saveRecord(hoprecord)
+ DR = DataReader(datasource = D)
+ DR.extract(cellrequest = [CellRequestByName(name = 'RoundTripDelay'), CellRequestByName(name = 'RoundTripDelay', unit = micro_second), CellRequestByName(name = 'RoundTripDelay', unit = nano_second)])
+ for x in DR:
+ mill, mic, nan = milli.pop(0), micro.pop(0), nano.pop(0)
+ delta = [(x[0]-mill)/mill, (x[1]-mic)/mic, (x[2]-nan)/nan]
+ mask = map(lambda d: abs(d)< self.eps, delta)
+ self.assertFalse((False in mask), "Conversion introduced a huge error GOT: %s EXPECTED: %s %s %s DELTA: %s MASK: %s" % (x, mill,mic,nan, delta, mask))
+
+ def test_reader(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', cardinal)])
+ n1 = map(lambda x: randint(1, 100000), range(N))
+ n2 = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = N)
+ hoprecord.update(name = 'rnd', values = n1)
+ hoprecord.update(name = 'idx', values = range(N))
+ DR = DataReader(datasource = D)
+ self.assertFalse(DR.sourceExpanded.isSet(), "dataready, howcome?")
+ D.saveRecord(hoprecord)
+ self.assertTrue(DR.sourceExpanded.isSet(), "data not ready, howcome?")
+ for _ in DR:
+ pass
+ self.assertFalse(DR.sourceExpanded.isSet(), "data still ready, howcome?")
+ hoprecord.update(name = 'rnd', values = n2)
+ D.saveRecord(hoprecord)
+ self.assertTrue(DR.sourceExpanded.isSet(), "data not ready, howcome?")
+ DR.rewind()
+ got = len([x for x in DR])
+ self.assertEqual(2*N, got, "Expected %d items and got %d" % (2*N, got))
+
+ def test_formatter(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', cardinal)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = N)
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(N))
+ D.saveRecord(hoprecord)
+ DF = DumbFormatter(datasource = D)
+ res = DF.serialize()
+ #print res
+ self.assertGreater(len(res), 2, "empty? %s" % res)
+ JF = JsonFormatter(datasource = D)
+ JF.extract(cellrequest = [CellRequestByName(name = 'rnd')])
+ res = JF.serialize()
+ #print res
+ self.assertGreater(len(res), 2, "empty? %s" % res)
+
+ def test_aggregator(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+# self.assertRaises(AggregatorError, Aggregator(D, CellRequestByName(name = 'rnd')))
+ s = Sum(D, CellRequestByName(name = 'rnd'))
+ mn = Min(D, CellRequestByName(name = 'rnd'))
+ mx = Max(D, CellRequestByName(name = 'rnd'))
+ avg = Mean(D, CellRequestByName(name = 'rnd'))
+ S = sum(n)
+ self.assertEqual(s.data._rawrecords[0], (N, S), "sum %f != %f" % (s._aggregate, S))
+ self.assertEqual(mn.data._rawrecords[0], (N, min(n)), "min %f != %f" % (mn._aggregate, min(n)))
+ self.assertEqual(mx.data._rawrecords[0], (N, max(n)), "max %f != %f" % (mx._aggregate, max(n)))
+ self.assertEqual(avg.data._rawrecords[0], (N, S/float(N)), "avg %f != %f" % (avg._aggregate, S/N))
+
+ def test_sampler(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ H = Head(datasource = D, head = 5)
+ DR = DataReader(datasource = H)
+
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+
+ expect = n[:5]
+ got = [ x for _, x in DR ]
+ self.assertEqual(got, expect, "head %s != %s" % (got, expect))
+
+ T = Tail(datasource = D)
+ T.tail = 5
+ DR2 = DataReader(datasource = T)
+
+ expect = n[-5:]
+ got = [ x for _, x in DR2 ]
+ self.assertEqual(got, expect, "tail %s != %s" % (got, expect))
+
+
+ def test_DispersionOK(self):
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('values', countable)])
+ items = [55,56,57,63,67,68]
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(items))
+ hoprecord.update(name = 'values', values = items)
+ hoprecord.update(name = 'idx', values = range(len(items)))
+ D.saveRecord(hoprecord)
+ a = Deviation(D, CellRequestByName(name = 'values'))
+ a.empirical = False
+ a.data
+ self.assertTrue((5.26 == round(a._aggregate,2) ), "Dispersion FAILED 5.26 = "+str(a._aggregate))
+
+
+
+
+ def test_PercentOK(self):
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('values', countable)])
+ items = [4.0,5.0,5.0,4.0]
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(items))
+ hoprecord.update(name = 'values', values = items)
+ hoprecord.update(name = 'idx', values = range(len(items)))
+ D.saveRecord(hoprecord)
+ a = Percentile(D, CellRequestByName(name = 'values'))
+ a.percentile = .5
+ a.data
+ self.assertTrue((4.5 == a._aggregate ), "Percent is FAILED 4.5 = "+str(a._aggregate))
+
+ def test_Pipe(self):
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('values', countable)])
+ items = [55,56,57,63,67,68]
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(items))
+ hoprecord.update(name = 'values', values = items)
+ hoprecord.update(name = 'idx', values = range(len(items)))
+ D.saveRecord(hoprecord)
+ a = Mean(datasource = Tail(datasource = Head(datasource = D, head = 4), tail = 2), cellrequest = CellRequestByName(name = 'values'))
+ a.data
+ res = a._aggregate
+ self.assertTrue((60 == res ), "Pipe FAILED 60 = "+str(res))
+
+ def test_aggregatorManager(self):
+ N = 12#00
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ n2 = map(lambda x: randint(1, 100000), range(N))
+
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+ n = n[-10:]
+ n = n[:5]
+ expected = sum(n)
+ AM = AggregatorManager()
+ azon = AM.newAggregator(D, CellRequestByName(name = 'rnd'), [(Tail, {'tail': 10}), (Head, {'head': 5}), (Sum, {})])
+ A = AM[ azon ]
+
+ A.data
+ got = A._aggregate
+ self.assertEqual(expected, got, "sum %f != %f" % (expected, got))
+
+ hoprecord.update(name = 'rnd', values = n2)
+ D.saveRecord(hoprecord)
+ A.data
+
+
+ got = A._aggregate
+ n2 = n2[-10:]
+ n2 = n2[:5]
+ expected = sum(n2)
+ self.assertEqual(expected, got, "2 sum %f != %f" % (expected, got))
+
+
+ def test_ComplexaggregateOK(self):
+ '''
+ '''
+ header = DataHeaderGeneratedByDescription("traceroute", [('idx', cardinal), ("info", [("rttinfo", countable)])])
+
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = 5)
+ inf1, inf2, inf3, inf4, inf5 = hoprecord.getRecordTemplates(name = "info")
+
+ inf1.update(name = 'rttinfo', values = [10])
+ inf2.update(name = 'rttinfo', values = [15])
+ inf3.update(name = 'rttinfo', values = [16])
+ inf4.update(name = 'rttinfo', values = [18])
+ inf5.update(name = 'rttinfo', values = [20])
+
+ hoprecord.update(name = 'idx', values = [1,2,3,4,5])
+
+ D.saveRecord(hoprecord)
+ #a = Aggregator(D, ['info','rttinfo'])
+
+
+ def test_sorter(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ S = Sorter(datasource = D, keycell = CellRequestByName(name = "rnd"))
+ SR = Sorter(datasource = D, ascending = False)
+ SR.keycell = CellRequestByName(name = "rnd")
+ DR = DataReader(datasource = S)
+ DR2 = DataReader(datasource = SR)
+
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+
+ n.sort()
+ got = [ x for _, x in DR ]
+ self.assertEqual(got, n, "sort %s != %s" % (got, n))
+
+ n.reverse()
+ got = [ x for _, x in DR2 ]
+ self.assertEqual(got, n, "reverse sort %s != %s" % (got, n))
+
+
+
+if __name__ == "__main__":
+ #import sys;sys.argv = ['', 'Test.test_UM']
+ unittest2.main()
diff --git a/Monitoring/src/main/python/DataProcessing/test.py.old b/Monitoring/src/main/python/DataProcessing/test.py.old
new file mode 100644
index 0000000..093a286
--- /dev/null
+++ b/Monitoring/src/main/python/DataProcessing/test.py.old
@@ -0,0 +1,368 @@
+'''
+Created on Sep 2, 2011
+
+@author: steger
+'''
+import unittest
+from DataProcessing.Parameter import Parameter
+from random import randint
+from Example.Prefixes import prefixes, PM
+from Example.Dimensions import DM, timeinterval, countable, ipaddress, cardinal
+from Example.Units import UM, milli_second, pico_second, dozen, micro_second,\
+ piece, nano_second, second
+from DataProcessing.Data import Data
+from DataProcessing.Aggregator import Aggregator, AggregatorError, Sum, Max, Min,\
+ Mean, Deviation, Percentile
+from DataProcessing.DataReader import DataReader
+from DataProcessing.DataFormatter import JsonFormatter, DumbFormatter
+from DataProcessing.Sampler import Head, SamplerError, Tail, Sorter
+from DataProcessing.AggregatorManager import AggregatorManager
+from DataProcessing.DataHeader import DataHeaderGeneratedByDescription,\
+ DataHeader
+from DataProcessing.DataHeaderCell import DataHeaderCell, CellRequestByName,\
+ CellRequestByFeature
+from DataProcessing.DataError import DataError
+
+
+class Test(unittest.TestCase):
+ eps = 1e-15
+
+ def different(self, expect, got):
+ return abs(expect - got) / float(expect) < self.eps
+
+ def setUp(self):
+ pass
+
+ def test_PM(self):
+ for ref, symbol, base, exponent in prefixes:
+ scale = base ** exponent
+ p = PM[ref]
+ self.assertEqual(str(p), symbol, "symbol cannot be read back %s %s" % (p, symbol))
+ self.assertEqual(p.scale, scale, "prefix %s scale error got: %f expect: %f" % (p, p.scale, scale))
+
+ self.assertTrue('p' in PM, "cannot find symbol")
+ self.assertFalse('pico' in PM, "found a symbol, which I shouldn't")
+
+
+ def test_UM(self):
+ s = randint(1, 10000)
+ expect = s * 1e-3
+ got = UM.convert(s, milli_second, second)
+ self.assertTrue(self.different(expect, got), "Different (%d ms) expect %f s got %f s" % (s, expect, got))
+ expect = s * 1e9
+ got = UM.convert(s, milli_second, pico_second)
+ self.assertTrue(self.different(expect, got), "Different (%d ms) expect %f ps got %f ps" % (s, expect, got))
+
+ kilobit = UM["kilo_bit"]
+ megaByte = UM["mega_Byte"]
+ b = randint(1, 1000)
+ expect = b * 1e-3 / 8.
+ got = UM.convert(b, kilobit, megaByte)
+ self.assertTrue(self.different(expect, got), "Different (%d kbit) expect %f MB got %f MB" % (b, expect, got))
+
+ def test_D(self):
+ dim = DM['TimeInterval']
+ for u in [second, milli_second]:
+ self.assertTrue(dim.containsUnit(u), "piece %s not in dim" % u)
+ bu = UM.getBasinByUnit(UM['second'])
+ br = UM.getBasinByReference('micro_second')
+ self.assertTrue(bu == br, "basins differ")
+
+ def test_parameter(self):
+ n = randint(0, 1000)
+ parameter = Parameter(name = 'testparameter', valuetype = float, unitmanager = UM, dimension = countable, default = (n, dozen))
+ v1 = 12 * parameter.value[0]
+ v2 = parameter.convert(piece)
+ self.assertTrue(abs(v1 - v2) < self.eps, "%d dozen and %d are not equal (type 1)" % (n, v2))
+ n = randint(0, 1000)
+ parameter.value = (n, piece)
+ v = parameter.convert(dozen)
+ self.assertTrue(abs(12 * v - n) < self.eps, "%f dozen and %d are not equal (type 2)" % (v, n))
+
+ def test_addcolumn(self):
+ '''
+ '''
+ c1 = DataHeaderCell(name = "oszlop", dimension = timeinterval, unit = milli_second)
+ c2 = DataHeaderCell(name = "oszlop2", dimension = timeinterval, unit = second, feature = "kutyafule")
+ h = DataHeader(name = "proba")
+ h.addColumn(c1)
+ h.addColumn(c2)
+ self.assertRaises(DataError, h.addColumn, c1)
+ cr1 = CellRequestByName(name = "oszlop2")
+ cr2 = CellRequestByFeature(feature = "kutyafule")
+ qr1 = [ x for x in h.getCell(cellrequest = cr1) ]
+ qr2 = [ x for x in h.getCell(cellrequest = cr2) ]
+ self.assertEqual(qr1, qr2, "getCell oopses 1")
+ qr = [ x for x in h.getCell(cellrequest = CellRequestByFeature(feature = "macskanyelv")) ]
+ self.assertEqual(len(qr), 0, "getCell oopses 2")
+
+
+ def test_createheadertemplate(self):
+ header = DataHeader(name = "traceroute")
+ cell = DataHeaderCell(name = "idx", dimension = cardinal)
+ header.addColumn(cell)
+ iphdr = DataHeader(name = "info")
+ cell = DataHeaderCell(name = "address", dimension = ipaddress)
+ iphdr.addColumn(cell)
+ rtthdr = DataHeader(name = "rttinfo")
+ cell = DataHeaderCell(name = "roundtripdelay", dimension = timeinterval, unit = milli_second)
+ rtthdr.addColumn(cell)
+ iphdr.addColumn(rtthdr)
+ header.addColumn(iphdr)
+ header2 = DataHeaderGeneratedByDescription("traceroute", [('idx', cardinal), ("info", [('address', ipaddress), ("rttinfo", [('roundtripdelay', timeinterval, milli_second)])])])
+ self.assertTrue(header == header2, "headers differ:\n%s\n%s" % (header, header2))
+
+ def test_complex_table(self):
+ '''
+ '''
+ header = DataHeaderGeneratedByDescription("traceroute", [('idx', cardinal), ("info", [('address', ipaddress), ("rttinfo", [('roundtripdelay', timeinterval, milli_second)])])])
+
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = 2)
+ iprec1, iprec2 = hoprecord.getRecordTemplates(name = "info")
+ (rttrec1,) = iprec1.getRecordTemplates(name = "rttinfo", sizes = [3,])
+ (rttrec2,) = iprec2.getRecordTemplates(name = "rttinfo", sizes = [3,])
+
+ rttrec1.update(name = 'roundtripdelay', values = [2.925, 3.279, 3.758], unit = milli_second)
+ iprec1.update(name = 'address', values = ['192.168.1.1'])
+
+ rttrec2.update(name = 'roundtripdelay', values = [.008634, .008857, .009054], unit = second)
+ iprec2.update(name = 'address', values = ['157.181.172.126'])
+
+ hoprecord.update(name = 'idx', values = [1,2])
+
+ D.saveRecord(hoprecord)
+
+ def test_iteratorNextractor(self):
+ N = 1000
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('RoundTripDelay', timeinterval, milli_second)])
+ milli = map(lambda x: randint(1, 100000), range(N))
+ micro = map(lambda x: 1000*x, milli)
+ nano = map(lambda x: 1000000*x, milli)
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = N)
+ hoprecord.update(name = 'RoundTripDelay', values = milli, unit = milli_second)
+ hoprecord.update(name = 'idx', values = range(N))
+ D.saveRecord(hoprecord)
+ DR = DataReader(datasource = D)
+ DR.extract(cellrequest = [CellRequestByName(name = 'RoundTripDelay'), CellRequestByName(name = 'RoundTripDelay', unit = micro_second), CellRequestByName(name = 'RoundTripDelay', unit = nano_second)])
+ for x in DR:
+ mill, mic, nan = milli.pop(0), micro.pop(0), nano.pop(0)
+ delta = [(x[0]-mill)/mill, (x[1]-mic)/mic, (x[2]-nan)/nan]
+ mask = map(lambda d: abs(d)< self.eps, delta)
+ self.assertFalse((False in mask), "Conversion introduced a huge error GOT: %s EXPECTED: %s %s %s DELTA: %s MASK: %s" % (x, mill,mic,nan, delta, mask))
+
+ def test_reader(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', cardinal)])
+ n1 = map(lambda x: randint(1, 100000), range(N))
+ n2 = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = N)
+ hoprecord.update(name = 'rnd', values = n1)
+ hoprecord.update(name = 'idx', values = range(N))
+ DR = DataReader(datasource = D)
+ self.assertFalse(DR.sourceExpanded.isSet(), "dataready, howcome?")
+ D.saveRecord(hoprecord)
+ self.assertTrue(DR.sourceExpanded.isSet(), "data not ready, howcome?")
+ for _ in DR:
+ pass
+ self.assertFalse(DR.sourceExpanded.isSet(), "data still ready, howcome?")
+ hoprecord.update(name = 'rnd', values = n2)
+ D.saveRecord(hoprecord)
+ self.assertTrue(DR.sourceExpanded.isSet(), "data not ready, howcome?")
+ DR.rewind()
+ got = len([x for x in DR])
+ self.assertEqual(2*N, got, "Expected %d items and got %d" % (2*N, got))
+
+ def test_formatter(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', cardinal)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = N)
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(N))
+ D.saveRecord(hoprecord)
+ DF = DumbFormatter(datasource = D)
+ res = DF.serialize()
+ #print res
+ self.assertGreater(len(res), 2, "empty? %s" % res)
+ JF = JsonFormatter(datasource = D)
+ JF.extract(cellrequest = [CellRequestByName(name = 'rnd')])
+ res = JF.serialize()
+ #print res
+ self.assertGreater(len(res), 2, "empty? %s" % res)
+
+ def test_aggregator(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+# self.assertRaises(AggregatorError, Aggregator(D, CellRequestByName(name = 'rnd')))
+ s = Sum(D, CellRequestByName(name = 'rnd'))
+ mn = Min(D, CellRequestByName(name = 'rnd'))
+ mx = Max(D, CellRequestByName(name = 'rnd'))
+ avg = Mean(D, CellRequestByName(name = 'rnd'))
+ S = sum(n)
+ self.assertEqual(s.data._rawrecords[0], (N, S), "sum %f != %f" % (s._aggregate, S))
+ self.assertEqual(mn.data._rawrecords[0], (N, min(n)), "min %f != %f" % (mn._aggregate, min(n)))
+ self.assertEqual(mx.data._rawrecords[0], (N, max(n)), "max %f != %f" % (mx._aggregate, max(n)))
+ self.assertEqual(avg.data._rawrecords[0], (N, S/float(N)), "avg %f != %f" % (avg._aggregate, S/N))
+
+ def test_sampler(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ H = Head(datasource = D, head = 5)
+ DR = DataReader(datasource = H)
+
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+
+ expect = n[:5]
+ got = [ x for _, x in DR ]
+ self.assertEqual(got, expect, "head %s != %s" % (got, expect))
+
+ T = Tail(datasource = D)
+ T.tail = 5
+ DR2 = DataReader(datasource = T)
+
+ expect = n[-5:]
+ got = [ x for _, x in DR2 ]
+ self.assertEqual(got, expect, "tail %s != %s" % (got, expect))
+
+
+ def test_DispersionOK(self):
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('values', countable)])
+ items = [55,56,57,63,67,68]
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(items))
+ hoprecord.update(name = 'values', values = items)
+ hoprecord.update(name = 'idx', values = range(len(items)))
+ D.saveRecord(hoprecord)
+ a = Deviation(D, CellRequestByName(name = 'values'))
+ a.empirical = False
+ a.data
+ self.assertTrue((5.26 == round(a._aggregate,2) ), "Dispersion FAILED 5.26 = "+str(a._aggregate))
+
+
+
+
+ def test_PercentOK(self):
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('values', countable)])
+ items = [4.0,5.0,5.0,4.0]
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(items))
+ hoprecord.update(name = 'values', values = items)
+ hoprecord.update(name = 'idx', values = range(len(items)))
+ D.saveRecord(hoprecord)
+ a = Percentile(D, CellRequestByName(name = 'values'))
+ a.percentile = .5
+ a.data
+ self.assertTrue((4.5 == a._aggregate ), "Percent is FAILED 4.5 = "+str(a._aggregate))
+
+ def test_Pipe(self):
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('values', countable)])
+ items = [55,56,57,63,67,68]
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(items))
+ hoprecord.update(name = 'values', values = items)
+ hoprecord.update(name = 'idx', values = range(len(items)))
+ D.saveRecord(hoprecord)
+ a = Mean(datasource = Tail(datasource = Head(datasource = D, head = 4), tail = 2), cellrequest = CellRequestByName(name = 'values'))
+ a.data
+ res = a._aggregate
+ self.assertTrue((60 == res ), "Pipe FAILED 60 = "+str(res))
+
+ def test_aggregatorManager(self):
+ N = 12#00
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ n2 = map(lambda x: randint(1, 100000), range(N))
+
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+ n = n[-10:]
+ n = n[:5]
+ expected = sum(n)
+ AM = AggregatorManager()
+ azon = AM.newAggregator(D, CellRequestByName(name = 'rnd'), [(Tail, {'tail': 10}), (Head, {'head': 5}), (Sum, {})])
+ A = AM[ azon ]
+
+ A.data
+ got = A._aggregate
+ self.assertEqual(expected, got, "sum %f != %f" % (expected, got))
+
+ hoprecord.update(name = 'rnd', values = n2)
+ D.saveRecord(hoprecord)
+ A.data
+
+
+ got = A._aggregate
+ n2 = n2[-10:]
+ n2 = n2[:5]
+ expected = sum(n2)
+ self.assertEqual(expected, got, "2 sum %f != %f" % (expected, got))
+
+
+ def test_ComplexaggregateOK(self):
+ '''
+ '''
+ header = DataHeaderGeneratedByDescription("traceroute", [('idx', cardinal), ("info", [("rttinfo", countable)])])
+
+ D = Data(UM, header)
+ hoprecord = D.getTemplate(size = 5)
+ inf1, inf2, inf3, inf4, inf5 = hoprecord.getRecordTemplates(name = "info")
+
+ inf1.update(name = 'rttinfo', values = [10])
+ inf2.update(name = 'rttinfo', values = [15])
+ inf3.update(name = 'rttinfo', values = [16])
+ inf4.update(name = 'rttinfo', values = [18])
+ inf5.update(name = 'rttinfo', values = [20])
+
+ hoprecord.update(name = 'idx', values = [1,2,3,4,5])
+
+ D.saveRecord(hoprecord)
+ #a = Aggregator(D, ['info','rttinfo'])
+
+
+ def test_sorter(self):
+ N = 10
+ header = DataHeaderGeneratedByDescription("temptable", [('idx', cardinal), ('rnd', countable)])
+ n = map(lambda x: randint(1, 100000), range(N))
+ D = Data(UM, header)
+ S = Sorter(datasource = D, keycell = CellRequestByName(name = "rnd"))
+ SR = Sorter(datasource = D, ascending = False)
+ SR.keycell = CellRequestByName(name = "rnd")
+ DR = DataReader(datasource = S)
+ DR2 = DataReader(datasource = SR)
+
+ hoprecord = D.getTemplate(size = len(n))
+ hoprecord.update(name = 'rnd', values = n)
+ hoprecord.update(name = 'idx', values = range(len(n)))
+ D.saveRecord(hoprecord)
+
+ n.sort()
+ got = [ x for _, x in DR ]
+ self.assertEqual(got, n, "sort %s != %s" % (got, n))
+
+ n.reverse()
+ got = [ x for _, x in DR2 ]
+ self.assertEqual(got, n, "reverse sort %s != %s" % (got, n))
+
+
+
+if __name__ == "__main__":
+ #import sys;sys.argv = ['', 'Test.test_UM']
+ unittest.main()
diff --git a/Monitoring/src/main/python/Database/ConnectionPool.py b/Monitoring/src/main/python/Database/ConnectionPool.py
new file mode 100644
index 0000000..a9f2aa0
--- /dev/null
+++ b/Monitoring/src/main/python/Database/ConnectionPool.py
@@ -0,0 +1,17 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+
+class ConnectionPool(object):
+ '''
+ classdocs
+ '''
+
+
+ def __init__(self, params):
+ '''
+ Constructor
+ '''
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Database/ConnectionPool.py.old b/Monitoring/src/main/python/Database/ConnectionPool.py.old
new file mode 100644
index 0000000..a9f2aa0
--- /dev/null
+++ b/Monitoring/src/main/python/Database/ConnectionPool.py.old
@@ -0,0 +1,17 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+
+class ConnectionPool(object):
+ '''
+ classdocs
+ '''
+
+
+ def __init__(self, params):
+ '''
+ Constructor
+ '''
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Database/DatabaseAccess.py b/Monitoring/src/main/python/Database/DatabaseAccess.py
new file mode 100644
index 0000000..d24a7bd
--- /dev/null
+++ b/Monitoring/src/main/python/Database/DatabaseAccess.py
@@ -0,0 +1,20 @@
+'''
+Created on 08.08.2011
+
+@author: csc
+'''
+
+from ConnectionPool import ConnectionPool
+
+class DatabaseAccess():
+ '''
+ classdocs
+ '''
+
+
+ def __init__(self, parent):
+ '''
+ Constructor
+ '''
+ self.parent = parent
+ self.pool = ConnectionPool(params = "foo") \ No newline at end of file
diff --git a/Monitoring/src/main/python/Database/DatabaseAccess.py.old b/Monitoring/src/main/python/Database/DatabaseAccess.py.old
new file mode 100644
index 0000000..d24a7bd
--- /dev/null
+++ b/Monitoring/src/main/python/Database/DatabaseAccess.py.old
@@ -0,0 +1,20 @@
+'''
+Created on 08.08.2011
+
+@author: csc
+'''
+
+from ConnectionPool import ConnectionPool
+
+class DatabaseAccess():
+ '''
+ classdocs
+ '''
+
+
+ def __init__(self, parent):
+ '''
+ Constructor
+ '''
+ self.parent = parent
+ self.pool = ConnectionPool(params = "foo") \ No newline at end of file
diff --git a/Monitoring/src/main/python/Database/StorageFIFO.py b/Monitoring/src/main/python/Database/StorageFIFO.py
new file mode 100644
index 0000000..ea406d2
--- /dev/null
+++ b/Monitoring/src/main/python/Database/StorageFIFO.py
@@ -0,0 +1,17 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+
+class StorageFIFO(object):
+ '''
+ classdocs
+ '''
+
+
+ def __init__(self, params):
+ '''
+ Constructor
+ '''
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Database/StorageFIFO.py.old b/Monitoring/src/main/python/Database/StorageFIFO.py.old
new file mode 100644
index 0000000..ea406d2
--- /dev/null
+++ b/Monitoring/src/main/python/Database/StorageFIFO.py.old
@@ -0,0 +1,17 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+
+class StorageFIFO(object):
+ '''
+ classdocs
+ '''
+
+
+ def __init__(self, params):
+ '''
+ Constructor
+ '''
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Database/__init__.py b/Monitoring/src/main/python/Database/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Database/__init__.py
diff --git a/Monitoring/src/main/python/Database/__init__.py.old b/Monitoring/src/main/python/Database/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Database/__init__.py.old
diff --git a/Monitoring/src/main/python/Database/test.py b/Monitoring/src/main/python/Database/test.py
new file mode 100644
index 0000000..01c954f
--- /dev/null
+++ b/Monitoring/src/main/python/Database/test.py
@@ -0,0 +1,26 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+
+
+class Test(unittest.TestCase):
+
+
+ def setUp(self):
+ pass
+
+
+ def tearDown(self):
+ pass
+
+
+ def testName(self):
+ pass
+
+
+if __name__ == "__main__":
+ #import sys;sys.argv = ['', 'Test.testName']
+ unittest.main() \ No newline at end of file
diff --git a/Monitoring/src/main/python/Database/test.py.old b/Monitoring/src/main/python/Database/test.py.old
new file mode 100644
index 0000000..01c954f
--- /dev/null
+++ b/Monitoring/src/main/python/Database/test.py.old
@@ -0,0 +1,26 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+
+
+class Test(unittest.TestCase):
+
+
+ def setUp(self):
+ pass
+
+
+ def tearDown(self):
+ pass
+
+
+ def testName(self):
+ pass
+
+
+if __name__ == "__main__":
+ #import sys;sys.argv = ['', 'Test.testName']
+ unittest.main() \ No newline at end of file
diff --git a/Monitoring/src/main/python/Driver/Driver$py.class b/Monitoring/src/main/python/Driver/Driver$py.class
new file mode 100644
index 0000000..1bcb0c2
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/Driver$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Driver/Driver.py b/Monitoring/src/main/python/Driver/Driver.py
new file mode 100644
index 0000000..a1f7dad
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/Driver.py
@@ -0,0 +1,17 @@
+'''
+Created on Oct 28, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+class Driver(object):
+ '''
+    @summary: an empty driver to serve as an ancestor (base) class
+ @author: steger, jozsef
+ '''
+ pass
+
+class DriverError(Exception):
+ pass \ No newline at end of file
diff --git a/Monitoring/src/main/python/Driver/Driver.py.old b/Monitoring/src/main/python/Driver/Driver.py.old
new file mode 100644
index 0000000..a1f7dad
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/Driver.py.old
@@ -0,0 +1,17 @@
+'''
+Created on Oct 28, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+class Driver(object):
+ '''
+    @summary: an empty driver to serve as an ancestor (base) class
+ @author: steger, jozsef
+ '''
+ pass
+
+class DriverError(Exception):
+ pass \ No newline at end of file
diff --git a/Monitoring/src/main/python/Driver/REST.py b/Monitoring/src/main/python/Driver/REST.py
new file mode 100644
index 0000000..aed2241
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/REST.py
@@ -0,0 +1,94 @@
+'''
+Created on Feb 4, 2013
+
+@author: Sandor Laki, rewritten for jython
+@organization: ELTE
+@contact: laki@complex.elte.hu
+'''
+
+from Driver import Driver
+from Credential.credentialtypes import UsernamePassword
+import urllib2, base64
+
+from javax.net.ssl import TrustManager, X509TrustManager
+from jarray import array
+from javax.net.ssl import SSLContext
+
+class TrustAllX509TrustManager(X509TrustManager):
+ def checkClientTrusted(self, chain, auth):
+ pass
+
+ def checkServerTrusted(self, chain, auth):
+ pass
+
+ def getAcceptedIssuers(self):
+ return None
+
+# It is not threadsafe, since setDefault is a global function...
+def setSSLTrusted():
+ trust_managers = array([TrustAllX509TrustManager()], TrustManager)
+ TRUST_ALL_CONTEXT = SSLContext.getInstance("SSL")
+ TRUST_ALL_CONTEXT.init(None, trust_managers, None)
+ DEFAULT_CONTEXT = SSLContext.getDefault()
+ SSLContext.setDefault(TRUST_ALL_CONTEXT)
+
+class RESTDriver(Driver):
+ '''
+ @summary: implements REST driver to fetch using http GET
+ @cvar timeout: timeout of connection
+ @type timeout: float
+ @cvar cache: a cache directory
+ @type cache: str
+ @ivar url: a default document locator to be reused
+ @type url: str
+ @ivar proxy: an interface to the http server
+ @type proxy: httplib2.Http
+ '''
+ timeout = 10
+
+ def __init__(self, url, credential = None, validate_ssl = False):
+ '''
+ @summary: initializes a proxy to the http service and saves a default document locator
+ @param url: the default document locator
+ @type url: str
+ @param credential: an authentication secret
+ @type credential: L{Credential} or None
+        @param validate_ssl: whether to apply strict certificate validation, default is False
+ @type validate_ssl: bool
+ '''
+ self.url = url
+ self.proxy = urllib2.Request(self.url)
+ if isinstance(credential, UsernamePassword):
+ base64string = base64.encodestring('%s:%s' % (credential.username, credential.password)).replace('\n', '')
+ self.proxy.add_header("Authorization", "Basic %s" % base64string)
+# self.proxy = Http(cache = self.cache, timeout = self.timeout)
+# self.proxy.disable_ssl_certificate_validation = not validate_ssl
+# if isinstance(credential, UsernamePassword):
+# # use password authentication
+# self.proxy.add_credentials(credential.username, credential.password)
+
+ def fetch(self, url = None):
+ '''
+ @summary: retrieve the document
+ @param url: the document locator, if not present the default is used
+ @type url: str or None
+ @return: the remote document
+ @rtype: str or None
+        @note: if the cached remote content is unchanged, None is returned
+ '''
+ if url is None:
+ url = self.url
+ try:
+ resp = urllib2.urlopen(self.proxy)
+ return resp.read()
+ except Exception, e:
+ print "Error: %s" % e
+ return None
+# status, response = self.proxy.request(uri = url, method = "GET")
+# if status.status == 200:
+# return response
+# if status.status == 304:
+# self.logger.warning("remote content @ %s was not changed" % url)
+# return None
+# self.logger.error("%s -- retrieving @%s failed: %s" % (status, url, response))
+# return None
diff --git a/Monitoring/src/main/python/Driver/SOAPClient$py.class b/Monitoring/src/main/python/Driver/SOAPClient$py.class
new file mode 100644
index 0000000..ff46733
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SOAPClient$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Driver/SOAPClient.py b/Monitoring/src/main/python/Driver/SOAPClient.py
new file mode 100644
index 0000000..e61888d
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SOAPClient.py
@@ -0,0 +1,18 @@
+'''
+Created on Sep 2, 2011
+
+@author: laki, sandor
+@organization: ELTE
+@contact: laki@complex.elte.hu
+@author: steger, jozsef
+'''
+
+import suds
+import suds.transport
+from Driver import Driver
+
+class SOAPClient(Driver, suds.client.Client):
+ pass
+SOAPSecurity=suds.wsse.Security
+SOAPUsernameToken=suds.wsse.UsernameToken
+SOAPHttpAuthenticated=suds.transport.http.HttpAuthenticated
diff --git a/Monitoring/src/main/python/Driver/SOAPClient.py.old b/Monitoring/src/main/python/Driver/SOAPClient.py.old
new file mode 100644
index 0000000..e61888d
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SOAPClient.py.old
@@ -0,0 +1,18 @@
+'''
+Created on Sep 2, 2011
+
+@author: laki, sandor
+@organization: ELTE
+@contact: laki@complex.elte.hu
+@author: steger, jozsef
+'''
+
+import suds
+import suds.transport
+from Driver import Driver
+
+class SOAPClient(Driver, suds.client.Client):
+ pass
+SOAPSecurity=suds.wsse.Security
+SOAPUsernameToken=suds.wsse.UsernameToken
+SOAPHttpAuthenticated=suds.transport.http.HttpAuthenticated
diff --git a/Monitoring/src/main/python/Driver/SshExec$py.class b/Monitoring/src/main/python/Driver/SshExec$py.class
new file mode 100644
index 0000000..c627e9b
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SshExec$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Driver/SshExec.py b/Monitoring/src/main/python/Driver/SshExec.py
new file mode 100644
index 0000000..5de7051
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SshExec.py
@@ -0,0 +1,187 @@
+from __future__ import with_statement
+'''
+Created on Feb 29, 2012
+
+@summary: A Jython compatible ssh driver
+@author: Sandor Laki
+@organization: ELTE
+@contact: lakis@inf.elte.hu
+'''
+
+
+from java.io import BufferedReader
+from java.io import IOException
+from java.io import InputStream
+from java.io import InputStreamReader
+from java.io import File
+from jarray import zeros
+from java.lang import String
+from com.jcraft.jsch import JSch
+from StringIO import StringIO
+#import libssh2
+#import socket
+from tempfile import mkstemp
+from os import close, write, unlink, path, access, R_OK
+#from SshKeygen import SshKeygen
+from threading import Lock
+from Credential.credentialtypes import Credential, UsernameRSAKey,\
+ UsernamePassword
+from Driver import Driver
+import org.python.core.util.FileUtil as FileUtil
+import java.lang.Exception
+#from org.slf4j import Logger
+#from org.slf4j import LoggerFactory
+#import org.python.core.PyFile as PyFile
+#driverlock = Lock()
+
+class SshExec(Driver):
+ '''
+ @summary: this class handles control of a monitoring tool over an ssh channel
+ @author: steger, jozsef
+ @todo: get rid of global lock if possible
+    @note: if no global lock is there, a lot of segmentation faults occur in a concurrent session opening and program execution
+ '''
+ #lock = Lock() #driverlock
+# log = LoggerFactory.getLogger("eu.novi.monitoring.Driver.SshExec")
+
+ def __init__(self, host, credential, port = 22, command = "echo helloworld @ `hostname`", known_host = None):
+ '''
+        @summary: initializes a class to execute a single remote command via the ssh protocol; takes care of opening the ssh session
+        @param host: name of the host machine
+ @type host: string
+ @param credential: authentication details
+ @type credential: Credential
+ @param port: port of the ssh service
+ @type port: integer
+ @param command: the remote command to execute later
+ @type command: string
+ @raise Exception: wrong authentication type
+
+ @note: only a single command can be run by the class
+
+
+ @todo: check what happens with commands run in the background
+ '''
+ self.lock = Lock()
+ self.session = None
+ self.channel = None
+ if host is None: return
+
+ if not isinstance(credential, Credential):
+ raise Exception("wrong type of credential")
+ with self.lock:
+ self._result = ""
+# self.session = libssh2.Session()
+# self.session.set_banner()
+ self.command = command
+ self.fn_pub = None
+# self.pemfile = None
+
+ try:
+ self.jsch = JSch()
+# self.log.info("Host:%s Username:%s Port:%s Command:%s" % (host, credential.username, port, self.command))
+ print "h:%s un:%s p:%s" % (host, credential.username, port)
+ self.session = self.jsch.getSession(credential.username, host, port)
+ #self.jsch.setKnownHosts("/home/maven/.ssh/known_hosts")
+
+ if isinstance(credential, UsernameRSAKey):
+ privatekey = credential.rsakey
+# self.log.info("Credential: %s" % privatekey)
+ self.jsch.addIdentity(privatekey)
+ self.session.setConfig("StrictHostKeyChecking", "no")
+ self.session.setTimeout(5000);
+ print "identity file %s\n" % privatekey
+ PATH=privatekey
+ if path.exists(PATH) and path.isfile(PATH) and access(PATH, R_OK):
+ print "File exists and is readable"
+# self.log.info("Privatekey exists and is readable")
+ else:
+# self.log.info("RSA key is missing: %s" % PATH)
+ raise Exception("RSA key file is missing or not readable: %s" % PATH)
+
+# publickey_srt = SshKeygen.convert_key_from_file(privatekey)
+# fd, publickey = mkstemp(suffix = ".pub", prefix = "rsa", text = True)
+# write(fd, "ssh-rsa %s" % publickey_srt)
+# close(fd)
+# self.fn_pub = publickey
+# self.session._session.userauth_publickey_fromfile(credential.username, publickey, privatekey, credential.password)
+ elif isinstance(credential, UsernamePassword):
+ self.session.setPassword( credential.password )
+ else:
+ raise Exception("wrong type of credential")
+
+ self.session.connect()
+ except java.lang.Exception, e:
+# self.log.info("Connection error")
+ print "Connection Error"
+ print "Exc:%s" % e
+ self.session = None
+ self.channel = None
+ #raise e
+
+# self.channel = self.session.open_session()
+
+ def execute(self):
+ '''
+ @summary: invokes the remote command to run. The standard output of the command is stored in the result variable.
+ '''
+ with self.lock:
+# self.log.info("Execute:%s" % self.command)
+ if self.session is None: return StringIO("")
+ self.channel = self.session.openChannel("exec")
+ self.channel.setCommand(self.command)
+ self.channel.setInputStream(None)
+
+ stdo = self.channel.getInputStream()
+# br = BufferedReader( InputStreamReader( stdo ) )
+ self.channel.connect()
+
+ return FileUtil.wrap( stdo )
+
+# buffer = 4096
+# buf = zeros(1024,'b')
+# while True:
+# while stdo.available()>0:
+# i=stdo.read(buf,0,1024)
+# if i<0: break
+# self._result += str(String(buf,0,i))
+# if channel.isClosed(): break
+# channel.disconnect()
+# return StringIO(self._result)
+
+# def _get_result(self):
+# '''
+# @summary: the copy of the standard output of the remote command
+# @return: the standard output of the remote command
+# @rtype: string
+# '''
+# return str(self._result)
+
+ def close(self):
+ '''
+ @summary: the destructor takes care of closing the session and removing the public key file stored temporary
+ '''
+ with self.lock:
+ if self.channel is not None:
+ self.channel.disconnect()
+ self.channel = None
+ if self.session is not None:
+ self.session.disconnect()
+# if self.fn_pub is not None:
+# unlink(self.fn_pub)
+
+# result = property(_get_result,None,None)
+
+ def __del__(self):
+ self.close()
+
+ def _isConnected(self):
+ try:
+ if self.channel is not None: return True
+ else: return False
+ except:
+ return False
+
+ isConnected = property(_isConnected,None, None)
+
+
diff --git a/Monitoring/src/main/python/Driver/SshExec.py.old b/Monitoring/src/main/python/Driver/SshExec.py.old
new file mode 100644
index 0000000..5de3b14
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SshExec.py.old
@@ -0,0 +1,92 @@
+'''
+Created on Jul 18, 2011
+
+@author: steger, jozsef
+@organization: ELTE
+@contact: steger@complex.elte.hu
+'''
+
+from Driver import Driver, DriverError
+from paramiko import SSHClient, RSAKey, SSHException, AutoAddPolicy
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from time import sleep
+
class SshDriver(Driver):
    '''
    @summary: paramiko-based ssh connection driver; supports password and RSA
    key authentication and retries on transient banner-read failures.
    '''
    # class-level tuning knobs shared by all instances
    timeout = 5   # seconds to wait for the ssh handshake
    trials = 3    # connection attempts on "SSH protocol banner" errors
    wait = .2     # seconds to sleep between retries

    def __init__(self):
        # the paramiko SSHClient; None whenever no connection is held
        self.client = None

    def __del__(self):
        # release the connection when the driver is garbage collected
        self.close()

    def connect(self, host, credential, port = 22, known_host = None):
        '''
        @summary: open an ssh connection to the host authenticated by the given credential
        @param host: host name or address of the ssh server
        @param credential: a UsernamePassword or UsernameRSAKey instance
        @param port: ssh port
        @param known_host: path to a known_hosts file; if None, unknown host keys are auto-accepted
        @raise DriverError: if no connection could be established
        '''
        self.client = SSHClient()
        if known_host is None:
            self.client.set_missing_host_key_policy( AutoAddPolicy() )
        else:
            self.client.load_host_keys(filename = known_host)
        if isinstance(credential, UsernamePassword):
            # use password authentication
            self.client.connect(hostname = host, port = port,
                username = credential.username, password =credential.password,
                timeout = self.timeout, look_for_keys = False, compress = True)
        elif isinstance(credential, UsernameRSAKey):
            # use the RSA key; an empty/None password means an unencrypted key
            if credential.password:
                pw = credential.password
            else:
                pw = None
            key = RSAKey(password = pw, filename = credential.rsakey)
            # retry only on the transient "banner" failure, up to self.trials times;
            # any other SSHException propagates immediately
            n = self.trials
            while n:
                try:
                    self.client.connect(hostname = host, port = port,
                        username = credential.username, pkey = key,
                        timeout = self.timeout, look_for_keys = False, compress = True)
                    break
                except SSHException, e:
                    if e.message.startswith("Error reading SSH protocol banner"):
                        n -= 1
                        print "WW: retry to connect @%s in %f seconds" % (host, self.wait)
                        sleep(self.wait)
                    else:
                        raise
        # covers both exhausted retries and an unrecognized credential type
        if not self.isConnected:
            self.close()
            raise DriverError("Cannot connect @%s " % host)
        self.host = host

    def close(self):
        '''
        @summary: drop the connection; never raises, always clears the client reference
        '''
        try:
            self.client.close()
        except:
            pass
        finally:
            self.client = None

    @property
    def isConnected(self):
        # a live transport means an established connection; any failure
        # (no client, no transport) reads as "not connected"
        try:
            return self.client.get_transport().is_active()
        except:
            return False
+
class SshExec(SshDriver):
    '''
    @summary: convenience driver that connects on construction and runs a
    (default or per-call) remote command.
    '''
    def __init__(self, host, credential, port = 22, command = "echo helloworld @ `hostname`", known_host = None):
        SshDriver.__init__(self)
        self.connect(host, credential, port, known_host)
        # default command used when execute() is called without an argument
        self.command = command

    def execute(self, command = None):
        '''
        @summary: run a command on the remote host
        @param command: shell command to run; falls back to the instance default if None
        @return: the file-like stdout stream of the remote command
        @raise DriverError: if the driver is not connected
        @note: stderr is only printed as a warning, it does not raise
        '''
        if not self.isConnected:
            raise DriverError("Not connected")
        if command is None:
            command = self.command
        _, stout, sterr = self.client.exec_command(command = command)
        e = sterr.read()
        if len(e):
            print "EE: execution @%s '%s' failed: %s" % (self.host, command, e)
        return stout
diff --git a/Monitoring/src/main/python/Driver/SshTunnel.py b/Monitoring/src/main/python/Driver/SshTunnel.py
new file mode 100644
index 0000000..83649df
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SshTunnel.py
@@ -0,0 +1,113 @@
+'''
+Created on Jan 14, 2013
+
+@author: steger
+'''
+
+import select
+import SocketServer
+import sys
+
+import paramiko
+
+#FIXME: let an SshTunnel instance launch a new thread;
+#TODO: docs
+#TODO: localport could be retrieved from a pool
+
class ForwardServer (SocketServer.ThreadingTCPServer):
    # serve each forwarded connection in its own daemon thread, and allow the
    # listening port to be rebound quickly after a restart
    daemon_threads = True
    allow_reuse_address = True
+
+
+class Handler (SocketServer.BaseRequestHandler):
+
+ def handle(self):
+ try:
+ chan = self.ssh_transport.open_channel('direct-tcpip',
+ (self.chain_host, self.chain_port),
+ self.request.getpeername())
+ except Exception, e:
+ print('Incoming request to %s:%d failed: %s' % (self.chain_host,
+ self.chain_port,
+ repr(e)))
+ return
+ if chan is None:
+ print('Incoming request to %s:%d was rejected by the SSH server.' %
+ (self.chain_host, self.chain_port))
+ return
+
+ print('Connected! Tunnel open %r -> %r -> %r' % (self.request.getpeername(),
+ chan.getpeername(), (self.chain_host, self.chain_port)))
+ while True:
+ r, w, x = select.select([self.request, chan], [], [])
+ if self.request in r:
+ data = self.request.recv(1024)
+ if len(data) == 0:
+ break
+ chan.send(data)
+ if chan in r:
+ data = chan.recv(1024)
+ if len(data) == 0:
+ break
+ self.request.send(data)
+ chan.close()
+ self.request.close()
+ print ('Tunnel closed from %r' % (self.request.getpeername(),))
+
+
+
+
class SshTunnel(object):
    '''
This class establishes a connection to the requested SSH server and sets up local port
forwarding (the openssh -L option) from a local port through a tunneled
connection to a destination reachable from the SSH server machine.
    '''
    def __init__(self):
        pass

    def setup(self, localport = 4000, username = 'root', server = 'localhost', serverport = 22, remoteserver = 'localhost', remoteport = 22, keyfile = None, password = None,
              look_for_keys = False):
        '''
        @summary: connect to server:serverport and forward localport to
        remoteserver:remoteport through the ssh connection.
        @note: serve_forever() blocks the calling thread until teardown()
        shuts the server down from elsewhere
        @note: NOTE(review): sys.exit() on connection failure terminates the whole
        process -- questionable in library code; raising would be cleaner
        '''
        client = paramiko.SSHClient()
        client.load_system_host_keys()
        # warn (but do not refuse) on unknown host keys
        client.set_missing_host_key_policy(paramiko.WarningPolicy())

        print ('Connecting to ssh host %s:%d ...' % (server, serverport))
        try:
            client.connect(server, serverport, username = username, key_filename = keyfile,
                           look_for_keys = look_for_keys, password = password)
        except Exception, e:
            print '*** Failed to connect to %s:%d: %r' % (server, serverport, e)
            sys.exit(1)

        print ('Now forwarding port %d to %s:%d ...' % (localport, remoteserver, remoteport))

        try:
            # this is a little convoluted, but lets me configure things for the Handler
            # object. (SocketServer doesn't give Handlers any way to access the outer
            # server normally.)
            class SubHander (Handler):
                chain_host = remoteserver
                chain_port = remoteport
                ssh_transport = client.get_transport()
            self.service = ForwardServer(('', localport), SubHander)
            self.service.serve_forever()
        except KeyboardInterrupt:
            print 'C-c: Port forwarding stopped.'
            sys.exit(0)

    def teardown(self):
        # stop the forwarding server; unblocks the serve_forever() loop in setup()
        print "ENDE"
        self.service.shutdown()
+
+
if __name__ == '__main__':
    # manual smoke test: open the tunnel, then tear it down from a timer
    # thread after 5 seconds (setup() blocks in serve_forever)
    T = SshTunnel()
    from threading import Timer
    t = Timer(5.0, T.teardown)
    t.start()
    T.setup(username = 'novi_novi', server = 'novilab.elte.hu', remoteserver = 'complex.elte.hu', keyfile = '/home/steger/Private/ssh/novi_rsa')
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Driver/SshTunnel.py.old b/Monitoring/src/main/python/Driver/SshTunnel.py.old
new file mode 100644
index 0000000..83649df
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/SshTunnel.py.old
@@ -0,0 +1,113 @@
+'''
+Created on Jan 14, 2013
+
+@author: steger
+'''
+
+import select
+import SocketServer
+import sys
+
+import paramiko
+
+#FIXME: let an SshTunnel instance launch a new thread;
+#TODO: docs
+#TODO: localport could be retrieved from a pool
+
class ForwardServer (SocketServer.ThreadingTCPServer):
    # one daemon thread per forwarded connection; allow fast port rebinding
    daemon_threads = True
    allow_reuse_address = True
+
+
class Handler (SocketServer.BaseRequestHandler):
    '''
    @summary: copies data between the accepted local socket (self.request) and a
    'direct-tcpip' ssh channel; chain_host, chain_port and ssh_transport are
    provided by a subclass (see SshTunnel.setup).
    '''

    def handle(self):
        try:
            chan = self.ssh_transport.open_channel('direct-tcpip',
                                                   (self.chain_host, self.chain_port),
                                                   self.request.getpeername())
        except Exception, e:
            print('Incoming request to %s:%d failed: %s' % (self.chain_host,
                                                            self.chain_port,
                                                            repr(e)))
            return
        if chan is None:
            print('Incoming request to %s:%d was rejected by the SSH server.' %
                  (self.chain_host, self.chain_port))
            return

        print('Connected! Tunnel open %r -> %r -> %r' % (self.request.getpeername(),
            chan.getpeername(), (self.chain_host, self.chain_port)))
        # relay bytes in both directions until either side signals EOF
        while True:
            r, w, x = select.select([self.request, chan], [], [])
            if self.request in r:
                data = self.request.recv(1024)
                if len(data) == 0:
                    break
                chan.send(data)
            if chan in r:
                data = chan.recv(1024)
                if len(data) == 0:
                    break
                self.request.send(data)
        chan.close()
        self.request.close()
        # FIXME: getpeername() is called on an already closed socket here and
        # will raise; the peer address should be captured before close()
        print ('Tunnel closed from %r' % (self.request.getpeername(),))
+
+
+
+
class SshTunnel(object):
    '''
This class establishes a connection to the requested SSH server and sets up local port
forwarding (the openssh -L option) from a local port through a tunneled
connection to a destination reachable from the SSH server machine.
    '''
    def __init__(self):
        pass

    def setup(self, localport = 4000, username = 'root', server = 'localhost', serverport = 22, remoteserver = 'localhost', remoteport = 22, keyfile = None, password = None,
              look_for_keys = False):
        '''
        @summary: connect to server:serverport and forward localport to
        remoteserver:remoteport; blocks in serve_forever() until teardown()
        @note: NOTE(review): sys.exit() on failure terminates the whole process
        '''
        client = paramiko.SSHClient()
        client.load_system_host_keys()
        # warn (but do not refuse) on unknown host keys
        client.set_missing_host_key_policy(paramiko.WarningPolicy())

        print ('Connecting to ssh host %s:%d ...' % (server, serverport))
        try:
            client.connect(server, serverport, username = username, key_filename = keyfile,
                           look_for_keys = look_for_keys, password = password)
        except Exception, e:
            print '*** Failed to connect to %s:%d: %r' % (server, serverport, e)
            sys.exit(1)

        print ('Now forwarding port %d to %s:%d ...' % (localport, remoteserver, remoteport))

        try:
            # this is a little convoluted, but lets me configure things for the Handler
            # object. (SocketServer doesn't give Handlers any way to access the outer
            # server normally.)
            class SubHander (Handler):
                chain_host = remoteserver
                chain_port = remoteport
                ssh_transport = client.get_transport()
            self.service = ForwardServer(('', localport), SubHander)
            self.service.serve_forever()
        except KeyboardInterrupt:
            print 'C-c: Port forwarding stopped.'
            sys.exit(0)

    def teardown(self):
        # stop the forwarding server; unblocks serve_forever() in setup()
        print "ENDE"
        self.service.shutdown()
+
+
if __name__ == '__main__':
    # manual smoke test: tear the tunnel down from a timer thread after
    # 5 seconds, since setup() blocks in serve_forever
    T = SshTunnel()
    from threading import Timer
    t = Timer(5.0, T.teardown)
    t.start()
    T.setup(username = 'novi_novi', server = 'novilab.elte.hu', remoteserver = 'complex.elte.hu', keyfile = '/home/steger/Private/ssh/novi_rsa')
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Driver/__init__$py.class b/Monitoring/src/main/python/Driver/__init__$py.class
new file mode 100644
index 0000000..3dd4a49
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Driver/__init__.py b/Monitoring/src/main/python/Driver/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/__init__.py
diff --git a/Monitoring/src/main/python/Driver/__init__.py.old b/Monitoring/src/main/python/Driver/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/__init__.py.old
diff --git a/Monitoring/src/main/python/Driver/test.py b/Monitoring/src/main/python/Driver/test.py
new file mode 100644
index 0000000..0a502b2
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/test.py
@@ -0,0 +1,128 @@
+from __future__ import with_statement
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+
+import site
+site.addsitedir('../site-packages')
+
+import unittest2
+from random import randint
+from Example.Resources import PLnodes
+from threading import Thread
+from Example.credentials import noviCredential, sonomaCredential
+from SshExec import SshExec
+from SOAPClient import SOAPClient
+
class Test(unittest2.TestCase):
    '''
    @summary: network integration tests: run remote commands over ssh on the
    PlanetLab test nodes and exercise the SONoMA SOAP service. Requires live
    network access and the credentials referenced below.
    '''
    # eth0 addresses of all configured PlanetLab nodes
    testnodes = map(lambda x: x.get_ipaddress("eth0")[0], PLnodes)
    cred_novi = noviCredential
    url_sonoma = "http://complex.elte.hu/~steger/sonoma/user.wsdl"
    cred_sonoma = sonomaCredential


    def setUp(self):
        pass

    def tearDown(self):
        pass

    def gettestnode(self):
        '''
        @summary: Return a test node IP address chosen random
        @return: ip address
        @rtype: string
        '''
        return self.testnodes[randint(1, len(self.testnodes))-1]

    def test_helloworldWithMaster(self):
        '''
        @summary: Execute remote command in the name of the testuser authenticated with the master key
        '''
        proc = SshExec(host = self.gettestnode(), credential = self.cred_novi)
        result = proc.execute().read()
        # FIXME: stray debug print left in the test
        print "alma %s" % result
        self.assertTrue(result.startswith("helloworld @ "), "Remote command output differs from expected")

    def echoWithMaster(self, address):
        '''
        @summary: Execute remote echo command in the name of the testuser authenticated with the master key
        @param address: ip address of the remote machine
        @type address: string
        '''
        try:
            n = randint(0, 10000)
            command = "echo %d" % n
            proc = SshExec(host = address, credential = self.cred_novi, command = command)
            result = proc.execute().read()
            self.assertTrue(result.strip() == str(n), "Remote command @%s output differs from expected: (%s != %d)" % (address, result, n))
        except Exception, e:
            # turn any transport error into a test failure instead of an error
            self.assertFalse(True, "Got an error %s" % e)

    def test_echoWithMaster(self):
        '''
        @summary: Execute remote echo command in the name of the testuser authenticated with the master key
        '''
        self.echoWithMaster(self.gettestnode())

    def test_distributedEcho(self):
        '''
        @summary: Execute parallel remote echo commands in a distributed fashion
        '''
        # one echo thread per node, then join each with a 5 second timeout
        threads = []
        for n in self.testnodes:
            t = Thread(target = self.echoWithMaster, args = (n,))
            t.daemon = True
            t.start()
            threads.append(t)
        while len(threads):
            t = threads.pop()
            t.join(5)

    def test_parallelEcho(self):
        '''
        @summary: Execute parallel remote echo commands in a test node
        '''
        # 20 concurrent echo commands against a single randomly chosen node
        N = 20
        n = self.gettestnode()
        threads = []
        while N:
            N -= 1
            t = Thread(target = self.echoWithMaster, args = (n,))
            t.daemon = True
            t.start()
            threads.append(t)
        while len(threads):
            t = threads.pop()
            t.join(5)

    def test_stress(self):
        '''
        @summary: Consecutively execute parallel remote echo commands in a distributed fashion
        '''
        # 5-20 concurrent echo commands per node, all joined at the end
        threads = []
        for n in self.testnodes:
            N = randint(5, 20)
            while N:
                N -= 1
                t = Thread(target = self.echoWithMaster, args = (n,))
                t.daemon = True
                t.start()
                threads.append(t)
        while len(threads):
            t = threads.pop()
            t.join(5)

    def test_soap(self):
        '''
        @summary: Run SONoMA getNodeList
        '''
        client = SOAPClient(self.url_sonoma)
        resources = client.service.getNodeList(filter = "AVAILABLE")
        self.assertGreater(len(resources), 0, "sonoma reports no nodes")
+
if __name__ == "__main__":
    # override the command line so that only the stress test is executed
    import sys;sys.argv = ['', 'Test.test_stress']
    unittest2.main()
diff --git a/Monitoring/src/main/python/Driver/test.py.old b/Monitoring/src/main/python/Driver/test.py.old
new file mode 100644
index 0000000..7e93fee
--- /dev/null
+++ b/Monitoring/src/main/python/Driver/test.py.old
@@ -0,0 +1,122 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+from random import randint
+from Example.Resources import PLnodes
+from threading import Thread
+from Example.credentials import noviCredential, sonomaCredential
+from SshExec import SshExec
+from SOAPClient import SOAPClient
+
class Test(unittest.TestCase):
    '''
    @summary: network integration tests (archived copy): remote ssh commands
    on the PlanetLab test nodes plus a SONoMA SOAP call; needs live network
    access and the referenced credentials.
    '''
    # eth0 addresses of all configured PlanetLab nodes
    testnodes = map(lambda x: x.get_ipaddress("eth0")[0], PLnodes)
    cred_novi = noviCredential
    url_sonoma = "http://complex.elte.hu/~steger/sonoma/user.wsdl"
    cred_sonoma = sonomaCredential


    def setUp(self):
        pass

    def tearDown(self):
        pass

    def gettestnode(self):
        '''
        @summary: Return a test node IP address chosen random
        @return: ip address
        @rtype: string
        '''
        return self.testnodes[randint(1, len(self.testnodes))-1]

    def test_helloworldWithMaster(self):
        '''
        @summary: Execute remote command in the name of the testuser authenticated with the master key
        '''
        proc = SshExec(host = self.gettestnode(), credential = self.cred_novi)
        result = proc.execute().read()
        self.assertTrue(result.startswith("helloworld @ "), "Remote command output differs from expected")

    def echoWithMaster(self, address):
        '''
        @summary: Execute remote echo command in the name of the testuser authenticated with the master key
        @param address: ip address of the remote machine
        @type address: string
        '''
        try:
            n = randint(0, 10000)
            command = "echo %d" % n
            proc = SshExec(host = address, credential = self.cred_novi, command = command)
            result = proc.execute().read()
            self.assertTrue(result.strip() == str(n), "Remote command @%s output differs from expected: (%s != %d)" % (address, result, n))
        except Exception, e:
            # turn any transport error into a test failure instead of an error
            self.assertFalse(True, "Got an error %s" % e)

    def test_echoWithMaster(self):
        '''
        @summary: Execute remote echo command in the name of the testuser authenticated with the master key
        '''
        self.echoWithMaster(self.gettestnode())

    def test_distributedEcho(self):
        '''
        @summary: Execute parallel remote echo commands in a distributed fashion
        '''
        # one echo thread per node, joined with a 5 second timeout each
        threads = []
        for n in self.testnodes:
            t = Thread(target = self.echoWithMaster, args = (n,))
            t.daemon = True
            t.start()
            threads.append(t)
        while len(threads):
            t = threads.pop()
            t.join(5)

    def test_parallelEcho(self):
        '''
        @summary: Execute parallel remote echo commands in a test node
        '''
        # 20 concurrent echo commands against a single randomly chosen node
        N = 20
        n = self.gettestnode()
        threads = []
        while N:
            N -= 1
            t = Thread(target = self.echoWithMaster, args = (n,))
            t.daemon = True
            t.start()
            threads.append(t)
        while len(threads):
            t = threads.pop()
            t.join(5)

    def test_stress(self):
        '''
        @summary: Consecutively execute parallel remote echo commands in a distributed fashion
        '''
        # 5-20 concurrent echo commands per node, all joined at the end
        threads = []
        for n in self.testnodes:
            N = randint(5, 20)
            while N:
                N -= 1
                t = Thread(target = self.echoWithMaster, args = (n,))
                t.daemon = True
                t.start()
                threads.append(t)
        while len(threads):
            t = threads.pop()
            t.join(5)

    def test_soap(self):
        '''
        @summary: Run SONoMA getNodeList
        '''
        client = SOAPClient(self.url_sonoma)
        resources = client.service.getNodeList(filter = "AVAILABLE")
        self.assertGreater(len(resources), 0, "sonoma reports no nodes")
+
if __name__ == "__main__":
    # run the whole suite; uncomment the argv override to run a single test
    #import sys;sys.argv = ['', 'Test.test_helloworldWithMaster']
    unittest.main()
diff --git a/Monitoring/src/main/python/Example/Dimensions$py.class b/Monitoring/src/main/python/Example/Dimensions$py.class
new file mode 100644
index 0000000..b9a5d83
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Dimensions$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/Dimensions.py b/Monitoring/src/main/python/Example/Dimensions.py
new file mode 100644
index 0000000..526ee11
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Dimensions.py
@@ -0,0 +1,42 @@
'''
Created on Oct 12, 2011

@author: steger
@summary: Here we declare some unit models to enable parameter conversions
'''
from DataProcessing.Dimension import DimensionManager
from Example.Units import UM
from DataProcessing.MeasurementLevel import Nominal, Interval, Ratio

# the module-level dimension registry, backed by the shared unit manager
DM = DimensionManager(unitmanager = UM)

# (reference, default unit reference, measurement level)
basedimensions = [
    ("Cardinal", "unitless", Nominal),
    ("NameOfSomething", "unitless", Nominal),
    ("Countable", "piece", Ratio),
    ("InformationSize", "bit", Ratio),
    ("IPAddress", "ipv4dotted", Nominal),
    ("PointInTime", "unixtimestamp", Interval),
  ]

# (reference, default unit reference, ancestor dimension, derivation kind)
deriveddimensions = [
    ("TimeInterval", "second", "PointInTime", DM.DifferenceDimension),
    ("Probability", "fraction", "Countable", DM.RatioDimension),
  ]

# the reference doubles as the human readable name in both registrations
for reference, unitreference, measurementlevel in basedimensions:
    DM.newBaseDimension(reference, reference, UM[unitreference], measurementlevel)

for reference, unitreference, ancestorreference, dimtype in deriveddimensions:
    DM.newDerivedDimension(reference, reference, UM[unitreference], DM[ancestorreference], dimtype)


#Some dimensions explicitly referenced for convenient importing elsewhere
nameofsomething = DM["NameOfSomething"]
pointintime = DM["PointInTime"]
timeinterval = DM["TimeInterval"]
cardinal = DM["Cardinal"]
countable = DM["Countable"]
ipaddress = DM["IPAddress"]
informationsize = DM["InformationSize"]
probability = DM["Probability"]
diff --git a/Monitoring/src/main/python/Example/Dimensions.py.old b/Monitoring/src/main/python/Example/Dimensions.py.old
new file mode 100644
index 0000000..526ee11
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Dimensions.py.old
@@ -0,0 +1,42 @@
'''
Created on Oct 12, 2011

@author: steger
@summary: Here we declare some unit models to enable parameter conversions
'''
from DataProcessing.Dimension import DimensionManager
from Example.Units import UM
from DataProcessing.MeasurementLevel import Nominal, Interval, Ratio

# the module-level dimension registry, backed by the shared unit manager
DM = DimensionManager(unitmanager = UM)

# (reference, default unit reference, measurement level)
basedimensions = [
    ("Cardinal", "unitless", Nominal),
    ("NameOfSomething", "unitless", Nominal),
    ("Countable", "piece", Ratio),
    ("InformationSize", "bit", Ratio),
    ("IPAddress", "ipv4dotted", Nominal),
    ("PointInTime", "unixtimestamp", Interval),
  ]

# (reference, default unit reference, ancestor dimension, derivation kind)
deriveddimensions = [
    ("TimeInterval", "second", "PointInTime", DM.DifferenceDimension),
    ("Probability", "fraction", "Countable", DM.RatioDimension),
  ]

# the reference doubles as the human readable name in both registrations
for reference, unitreference, measurementlevel in basedimensions:
    DM.newBaseDimension(reference, reference, UM[unitreference], measurementlevel)

for reference, unitreference, ancestorreference, dimtype in deriveddimensions:
    DM.newDerivedDimension(reference, reference, UM[unitreference], DM[ancestorreference], dimtype)


#Some dimensions explicitly referenced for convenient importing elsewhere
nameofsomething = DM["NameOfSomething"]
pointintime = DM["PointInTime"]
timeinterval = DM["TimeInterval"]
cardinal = DM["Cardinal"]
countable = DM["Countable"]
ipaddress = DM["IPAddress"]
informationsize = DM["InformationSize"]
probability = DM["Probability"]
diff --git a/Monitoring/src/main/python/Example/Metrics$py.class b/Monitoring/src/main/python/Example/Metrics$py.class
new file mode 100644
index 0000000..5f90f94
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Metrics$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/Metrics.py b/Monitoring/src/main/python/Example/Metrics.py
new file mode 100644
index 0000000..5765c77
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Metrics.py
@@ -0,0 +1,69 @@
from __future__ import with_statement
'''
Created on Oct 12, 2011

@author: steger
@summary: Here we declare monitorable metrics and combine them with tools that are measuring them
'''
from Example.Tools import sshping, sonomashortping, sshtraceroute, sshmeminfo,\
    sonomashortchirp
from DataProcessing.Parameter import ParameterList, Parameter
from Example.Units import UM, unitless, milli_second, Byte, piece
from Example.Dimensions import nameofsomething, informationsize,\
    timeinterval, cardinal, countable
from Resource.node import node
from Resource.path import path

# Each metric class below is a pure declaration: a display name, the resource
# kind it applies to (node or path) and its obligatory/optional parameters.

class FreeMemory(object):
    name = 'Free Memory'
    resourcetype = node
    p_obligatory = ParameterList()
    p_optional = ParameterList()

class DiskUsage(object):
    name = 'Disk Usage'
    resourcetype = node
    p_obligatory = ParameterList([
        Parameter(name = "Directory", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = ('/dev/mapper/planetlab-vservers', unitless))
        ])
    p_optional = ParameterList()

class RoundTripDelay(object):
    name = 'Round Trip Delay'
    resourcetype = path
    p_obligatory = ParameterList()
    p_optional = ParameterList([
        Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
        Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)),
        Parameter(name = "Delay", valuetype = float, unitmanager = UM, dimension = timeinterval, default = (200, milli_second)),
        Parameter(name = "TimeToLive", valuetype = int, unitmanager = UM, dimension = countable, default = (32, piece)),
        Parameter(name = "Interface", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = ("eth0", unitless)),
        ])

class OnewayDelay(object):
    name = 'One Way Delay'
    resourcetype = path
    p_obligatory = ParameterList()
    p_optional = ParameterList([
        Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
        Parameter(name = "Delay", valuetype = int, unitmanager = UM, dimension = timeinterval, default = (200, milli_second)),
        Parameter(name = "TimeToLive", valuetype = int, unitmanager = UM, dimension = countable, default = (32, piece)),
#        Parameter(name = "Interface", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = (novi_iface, unitless)),
        Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)),
        Parameter(name = "SourcePort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)),
        Parameter(name = "DestinationPort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)),
        ])

class HopMeasurement(object):
    name = 'Hop Measurement'
    resourcetype = path
    p_obligatory = ParameterList()
    p_optional = ParameterList()


# maps each metric to the list of tools able to measure it
# NOTE(review): DiskUsage is declared above but has no measuring tool here
MonitorMetrics = {
    FreeMemory: [sshmeminfo],
    RoundTripDelay: [sshping, sonomashortping],
    OnewayDelay: [sonomashortchirp],
    HopMeasurement: [sshtraceroute]
}
diff --git a/Monitoring/src/main/python/Example/Metrics.py.old b/Monitoring/src/main/python/Example/Metrics.py.old
new file mode 100644
index 0000000..e0d2b82
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Metrics.py.old
@@ -0,0 +1,68 @@
'''
Created on Oct 12, 2011

@author: steger
@summary: Here we declare monitorable metrics and combine them with tools that are measuring them
'''
from Example.Tools import sshping, sonomashortping, sshtraceroute, sshmeminfo,\
    sonomashortchirp
from DataProcessing.Parameter import ParameterList, Parameter
from Example.Units import UM, unitless, milli_second, Byte, piece
from Example.Dimensions import nameofsomething, informationsize,\
    timeinterval, cardinal, countable
from Resource.node import node
from Resource.path import path

# Each metric class below is a pure declaration: a display name, the resource
# kind it applies to (node or path) and its obligatory/optional parameters.

class FreeMemory(object):
    name = 'Free Memory'
    resourcetype = node
    p_obligatory = ParameterList()
    p_optional = ParameterList()

class DiskUsage(object):
    name = 'Disk Usage'
    resourcetype = node
    p_obligatory = ParameterList([
        Parameter(name = "Directory", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = ('/dev/mapper/planetlab-vservers', unitless))
        ])
    p_optional = ParameterList()

class RoundTripDelay(object):
    name = 'Round Trip Delay'
    resourcetype = path
    p_obligatory = ParameterList()
    p_optional = ParameterList([
        Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
        Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)),
        Parameter(name = "Delay", valuetype = float, unitmanager = UM, dimension = timeinterval, default = (200, milli_second)),
        Parameter(name = "TimeToLive", valuetype = int, unitmanager = UM, dimension = countable, default = (32, piece)),
        Parameter(name = "Interface", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = ("eth0", unitless)),
        ])

class OnewayDelay(object):
    name = 'One Way Delay'
    resourcetype = path
    p_obligatory = ParameterList()
    p_optional = ParameterList([
        Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
        Parameter(name = "Delay", valuetype = int, unitmanager = UM, dimension = timeinterval, default = (200, milli_second)),
        Parameter(name = "TimeToLive", valuetype = int, unitmanager = UM, dimension = countable, default = (32, piece)),
#        Parameter(name = "Interface", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = (novi_iface, unitless)),
        Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)),
        Parameter(name = "SourcePort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)),
        Parameter(name = "DestinationPort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)),
        ])

class HopMeasurement(object):
    name = 'Hop Measurement'
    resourcetype = path
    p_obligatory = ParameterList()
    p_optional = ParameterList()


# maps each metric to the list of tools able to measure it
# NOTE(review): DiskUsage is declared above but has no measuring tool here
MonitorMetrics = {
    FreeMemory: [sshmeminfo],
    RoundTripDelay: [sshping, sonomashortping],
    OnewayDelay: [sonomashortchirp],
    HopMeasurement: [sshtraceroute]
}
diff --git a/Monitoring/src/main/python/Example/Platforms$py.class b/Monitoring/src/main/python/Example/Platforms$py.class
new file mode 100644
index 0000000..4b3e43b
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Platforms$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/Platforms.py b/Monitoring/src/main/python/Example/Platforms.py
new file mode 100644
index 0000000..b6bf675
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Platforms.py
@@ -0,0 +1,9 @@
'''
Created on Nov 20, 2012

@author: steger
'''
from Service.mock_framework import Framework

# base directory of the ontology/configuration files, relative to the process
# working directory -- NOTE(review): fragile if the service starts elsewhere
baseurl = '../../information-model/monitoring-model'
# the shared mock framework wired up with the per-testbed configuration files
FRAMEWORK = Framework(baseurl, {'PlanetLab': 'config_planetlab.owl', 'FEDERICA': 'config_federica.owl'})
diff --git a/Monitoring/src/main/python/Example/Platforms.py.old b/Monitoring/src/main/python/Example/Platforms.py.old
new file mode 100644
index 0000000..067435a
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Platforms.py.old
@@ -0,0 +1,9 @@
'''
Created on Nov 20, 2012

@author: steger
'''
from Service.mock_framework import Framework

# base directory of the ontology/configuration files, relative to the process
# working directory
baseurl = '../../information-model/monitoring-model'
# NOTE(review): the config values repeat the baseurl prefix even though
# baseurl is also passed -- presumably redundant; the newer copy drops it
FRAMEWORK = Framework(baseurl, {'PlanetLab': '../../information-model/monitoring-model/config_planetlab.owl', 'FEDERICA': '../../information-model/monitoring-model/config_federica.owl'})
diff --git a/Monitoring/src/main/python/Example/Prefixes$py.class b/Monitoring/src/main/python/Example/Prefixes$py.class
new file mode 100644
index 0000000..5edc0c2
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Prefixes$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/Prefixes.py b/Monitoring/src/main/python/Example/Prefixes.py
new file mode 100644
index 0000000..54fa7b9
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Prefixes.py
@@ -0,0 +1,23 @@
'''
Created on Oct 12, 2011

@author: steger
@summary: Here we declare some unit models to enable parameter conversions
'''
from DataProcessing.Prefix import PrefixManager

# (reference, symbol, base, exponent): the prefix scales by base ** exponent
# FIXME: 'deco' (d, 10^0) is not an SI prefix -- SI defines deci (d, 10^-1)
# and deca (da, 10^1); as written this entry is a no-op scale factor of 1.
# Left unchanged here because other modules may reference it by name.
prefixes = [
    ('pico', 'p', 10, -12),
    ('nano', 'n', 10, -9),
    ('micro', 'mu', 10, -6),
    ('milli', 'm', 10, -3),
    ('deco', 'd', 10, 0),
    ('hecto', 'h', 10, 2),
    ('kilo', 'k', 10, 3),
    ('mega', 'M', 10, 6),
    ('giga', 'G', 10, 9),
]

# the module-level prefix registry used by the unit model
PM = PrefixManager()
for reference, symbol, base, exponent in prefixes:
    PM.newPrefix(reference, symbol, base, exponent)
diff --git a/Monitoring/src/main/python/Example/Prefixes.py.old b/Monitoring/src/main/python/Example/Prefixes.py.old
new file mode 100644
index 0000000..54fa7b9
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Prefixes.py.old
@@ -0,0 +1,23 @@
+'''
+Created on Oct 12, 2011
+
+@author: steger
+@summary: Here we declare some unit models to enable parameter conversions
+'''
+from DataProcessing.Prefix import PrefixManager
+
+prefixes = [
+ ('pico', 'p', 10, -12),
+ ('nano', 'n', 10, -9),
+ ('micro', 'mu', 10, -6),
+ ('milli', 'm', 10, -3),
+ ('deco', 'd', 10, 0),
+ ('hecto', 'h', 10, 2),
+ ('kilo', 'k', 10, 3),
+ ('mega', 'M', 10, 6),
+ ('giga', 'G', 10, 9),
+]
+
+PM = PrefixManager()
+for reference, symbol, base, exponent in prefixes:
+ PM.newPrefix(reference, symbol, base, exponent)
diff --git a/Monitoring/src/main/python/Example/Resources$py.class b/Monitoring/src/main/python/Example/Resources$py.class
new file mode 100644
index 0000000..9c9e4dd
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Resources$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/Resources.py b/Monitoring/src/main/python/Example/Resources.py
new file mode 100644
index 0000000..6c3418d
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Resources.py
@@ -0,0 +1,61 @@
+'''
+Created on Oct 12, 2011
+
+@author: steger
+@summary: Here we define the nodes that can take part in monitoring procedures
+
+@note: how to extract information
+for h in novilab.elte.hu planetlab1-novi.lab.netmode.ece.ntua.gr planetlab2-novi.lab.netmode.ece.ntua.gr smilax1.man.poznan.pl smilax2.man.poznan.pl smilax3.man.poznan.pl smilax4.man.poznan.pl smilax5.man.poznan.pl; do echo -n "\"$h\", "; ssh site_admin@$h -i ~/Private/ssh/novi_rsa /sbin/ifconfig | awk '/^[^[:space:]]/ { iface = $1} /inet addr/ { printf ("(\"%s\", \"%s\"), ", iface, $2) }' | sed s,addr.,,g | sed s/', $'// ; done
+'''
+
+from Resource.node import node
+from Resource.path import path
+from Resource.interface import interface
+from Example.Units import UM
+
+# PL node resources
+direction = interface.INGRESS | interface.EGRESS
+PLnodes = []
+def extendpl(hostname, ifaces):
+ n = node(name = hostname, resourceid = hostname)
+ for iface, ip in ifaces:
+ I = interface(name = iface, resourceid = "%s:%s" % (hostname, iface))
+ ipwu = ip, UM.ipv4dotted
+ if iface == "eth0":
+ I.setvalues(ifacename = iface, address = ipwu, ispublic = True, direction = direction, hostname = hostname)
+ else:
+ I.setvalues(ifacename = iface, address = ipwu, ispublic = False, direction = direction)
+ n.addinterface(I)
+ PLnodes.append(n)
+
+extendpl("novilab.elte.hu", [("eth0", "157.181.175.243"), ("federica", "192.168.29.45"), ("novi", "192.168.28.97"), ("novi_monitoring", "192.168.31.21")])
+extendpl("planetlab1-novi.lab.netmode.ece.ntua.gr", [("eth0", "147.102.22.66"), ("federica", "192.168.29.57"), ("novi", "192.168.28.161"), ("novi_monitoring", "192.168.31.33"), ("tun515-1", "192.168.20.1")])
+extendpl("planetlab2-novi.lab.netmode.ece.ntua.gr", [("eth0", "147.102.22.67"), ("federica", "192.168.29.61"), ("novi", "192.168.28.165"), ("tap514-1", "192.168.20.3")])
+extendpl("smilax1.man.poznan.pl", [("eth0", "150.254.160.19"), ("federica", "192.168.29.21"), ("novi", "192.168.28.29"), ("novi_fia_1", "192.168.32.5"), ("novi_monitoring", "192.168.31.13"), ("tap513-1", "192.168.20.4")])
+#extendpl("smilax2.man.poznan.pl", [("eth0", "150.254.160.20"), ("federica", "192.168.29.25"), ("novi", "192.168.28.33"), ("novi_fia_2", "192.168.32.5")])
+#extendpl("smilax3.man.poznan.pl", [("eth0", "150.254.160.21"), ("federica", "192.168.29.29"), ("novi", "192.168.28.37"), ("novi_fia_2", "192.168.32.17")])
+#extendpl("smilax4.man.poznan.pl", [("eth0", "150.254.160.22"), ("federica", "192.168.29.33"), ("novi", "192.168.28.41")])
+#extendpl("smilax5.man.poznan.pl", [("eth0", "150.254.160.23"), ("federica", "192.168.29.37"), ("novi", "192.168.28.45")])
+
+PLdict = dict(map(lambda x: (x.name, x), PLnodes))
+
+# PL are fully connected over the Internet
+PLpaths = []
+for s in PLdict.values():
+ for d in PLdict.values():
+ if s == d: continue
+ name = "%s->%s" % (s.name, d.name)
+ PLpaths.append( path(name = name, source = s, destination = d) )
+
+
+# FED node resources
+FEDnodes = []
+for nick, addr in [ ("fed.psnc", '192.168.31.1'), ("fed.dfn", '192.168.31.5'), ("fed.garr", '192.168.31.9') ]:
+ n = node(name = nick, resourceid = nick)
+ I = interface(name = "eth0", resourceid = "%s:eth0" % nick)
+ ipwu = (addr, UM.ipv4dotted)
+ I.setvalues(ifacename = "eth0", address = ipwu, ispublic = False, direction = direction)
+ n.addinterface(I)
+ FEDnodes.append(n)
+
+FEDdict = dict(map(lambda x: (x.name, x), FEDnodes))
diff --git a/Monitoring/src/main/python/Example/Resources.py.old b/Monitoring/src/main/python/Example/Resources.py.old
new file mode 100644
index 0000000..453d3c3
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Resources.py.old
@@ -0,0 +1,61 @@
+'''
+Created on Oct 12, 2011
+
+@author: steger
+@summary: Here we define the nodes that can take part in monitoring procedures
+
+@note: how to extract information
+for h in novilab.elte.hu planetlab1-novi.lab.netmode.ece.ntua.gr planetlab2-novi.lab.netmode.ece.ntua.gr smilax1.man.poznan.pl smilax2.man.poznan.pl smilax3.man.poznan.pl smilax4.man.poznan.pl smilax5.man.poznan.pl; do echo -n "\"$h\", "; ssh site_admin@$h -i ~/Private/ssh/novi_rsa /sbin/ifconfig | awk '/^[^[:space:]]/ { iface = $1} /inet addr/ { printf ("(\"%s\", \"%s\"), ", iface, $2) }' | sed s,addr.,,g | sed s/', $'// ; done
+'''
+
+from Resource.node import node
+from Resource.path import path
+from Resource.interface import interface
+from Example.Units import UM
+
+# PL node resources
+direction = interface.INGRESS | interface.EGRESS
+PLnodes = []
+def extendpl(hostname, ifaces):
+ n = node(name = hostname, resourceid = hostname)
+ for iface, ip in ifaces:
+ I = interface(name = iface, resourceid = "%s:%s" % (hostname, iface))
+ ipwu = ip, UM.ipv4dotted
+ if iface == "eth0":
+ I.setvalues(ifacename = iface, address = ipwu, ispublic = True, direction = direction, hostname = hostname)
+ else:
+ I.setvalues(ifacename = iface, address = ipwu, ispublic = False, direction = direction)
+ n.addinterface(I)
+ PLnodes.append(n)
+
+extendpl("novilab.elte.hu", [("eth0", "157.181.175.243"), ("federica", "192.168.29.45"), ("novi", "192.168.28.97"), ("novi_monitoring", "192.168.31.21")])
+extendpl("planetlab1-novi.lab.netmode.ece.ntua.gr", [("eth0", "147.102.22.66"), ("federica", "192.168.29.57"), ("novi", "192.168.28.161"), ("novi_monitoring", "192.168.31.33"), ("tun515-1", "192.168.20.1")])
+extendpl("planetlab2-novi.lab.netmode.ece.ntua.gr", [("eth0", "147.102.22.67"), ("federica", "192.168.29.61"), ("novi", "192.168.28.165"), ("tap514-1", "192.168.20.3")])
+extendpl("smilax1.man.poznan.pl", [("eth0", "150.254.160.19"), ("federica", "192.168.29.21"), ("novi", "192.168.28.29"), ("novi_fia_1", "192.168.32.5"), ("novi_monitoring", "192.168.31.13"), ("tap513-1", "192.168.20.4")])
+extendpl("smilax2.man.poznan.pl", [("eth0", "150.254.160.20"), ("federica", "192.168.29.25"), ("novi", "192.168.28.33"), ("novi_fia_2", "192.168.32.5")])
+extendpl("smilax3.man.poznan.pl", [("eth0", "150.254.160.21"), ("federica", "192.168.29.29"), ("novi", "192.168.28.37"), ("novi_fia_2", "192.168.32.17")])
+extendpl("smilax4.man.poznan.pl", [("eth0", "150.254.160.22"), ("federica", "192.168.29.33"), ("novi", "192.168.28.41")])
+extendpl("smilax5.man.poznan.pl", [("eth0", "150.254.160.23"), ("federica", "192.168.29.37"), ("novi", "192.168.28.45")])
+
+PLdict = dict(map(lambda x: (x.name, x), PLnodes))
+
+# PL are fully connected over the Internet
+PLpaths = []
+for s in PLdict.values():
+ for d in PLdict.values():
+ if s == d: continue
+ name = "%s->%s" % (s.name, d.name)
+ PLpaths.append( path(name = name, source = s, destination = d) )
+
+
+# FED node resources
+FEDnodes = []
+for nick, addr in [ ("fed.psnc", '192.168.31.1'), ("fed.dfn", '192.168.31.5'), ("fed.garr", '192.168.31.9') ]:
+ n = node(name = nick, resourceid = nick)
+ I = interface(name = "eth0", resourceid = "%s:eth0" % nick)
+ ipwu = (addr, UM.ipv4dotted)
+ I.setvalues(ifacename = "eth0", address = ipwu, ispublic = False, direction = direction)
+ n.addinterface(I)
+ FEDnodes.append(n)
+
+FEDdict = dict(map(lambda x: (x.name, x), FEDnodes))
diff --git a/Monitoring/src/main/python/Example/Tools$py.class b/Monitoring/src/main/python/Example/Tools$py.class
new file mode 100644
index 0000000..ce5824a
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Tools$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/Tools.py b/Monitoring/src/main/python/Example/Tools.py
new file mode 100644
index 0000000..c4c32cb
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Tools.py
@@ -0,0 +1,312 @@
+from __future__ import with_statement
+'''
+Created on Oct 12, 2011
+
+@author: steger
+@summary: Here we define some monitoring tools and dress them up with parameters and work flow description
+'''
+from DataProcessing.Parameter import Parameter, ParameterList
+from Example.Resources import PLdict
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from Driver.SOAPClient import SOAPClient
+from Driver.SshExec import SshExec
+from Example.Units import UM, Byte, micro_second, piece, milli_second,\
+ nano_second, unitless, nano_unixtimestamp, unixtimestamp, fraction,\
+ kilo_Byte, second
+from Example.Dimensions import cardinal, countable, ipaddress, timeinterval,\
+ informationsize, pointintime, nameofsomething, probability
+from DataProcessing.DataHeader import DataHeaderGeneratedByDescription
+
+DOM_SUBSTRATE = 1
+DOM_SLICE = 2
+
+sonoma_url = "http://complex.elte.hu/~steger/sonoma/user.wsdl"
+
+nodes = map(lambda x:(x.get_hostname("eth0"), unitless), PLdict.values())
+
+class sonomashortping:
+ driver = SOAPClient
+ name = "SONoMAPing"
+ domain = DOM_SUBSTRATE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('ping', [('Run', countable),
+ ('Sequence Number', countable),
+ ('Source Address', ipaddress),
+ ('Destination Address', ipaddress),
+ ('Packet Size', informationsize, Byte),
+ ('Time To Live', countable),
+ ('Round Trip Delay', timeinterval, micro_second)])
+
+ authtype = (UsernamePassword, )
+ kwargs = { "url": sonoma_url, "MAserviceport": 11123 }
+ hooks = {
+ "prehook" : """
+from base64 import b64decode
+self.decode = b64decode
+self.pattern = re.compile('^(\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+)\s+(\d+)\s+(\d+)$')
+self.username=self.credential.username
+self.password=self.credential.password
+self.client = self.driver(kw.get('url'))
+self.sessionId = self.client.service.requestSession(self.username, self.password, 'CSV', False)
+self.port = kw.get('MAserviceport')
+self.template = self.data.getTemplate(size = 1)
+ """,
+ "retrievehook" : """
+source = "%s:%d" % (self.parameters.get('SourceAddress', self.um.ipv4dotted), self.port)
+res = self.client.service.shortPing(self.sessionId,
+ source, self.parameters.get('DestinationAddress', self.um.ipv4dotted), self.parameters.get('Count', self.um.piece),
+ self.parameters.get('Delay', self.um.micro_second), self.parameters.get('PacketSize', self.um.Byte))
+rec = self.decode(res).splitlines()
+for r in rec:
+ if self.pattern.match(r):
+ self.template.clear()
+ ex = self.pattern.split(r)[:-1]
+ ex[0] = self.runcount
+ self.template.updateMany( ('Run', 'Sequence Number', 'Source Address', 'Destination Address', 'Packet Size', 'Time To Live', 'Round Trip Delay'), [ex,] )
+ self.data.saveRecord(self.template)
+return True
+ """,
+ "posthook": "self.client.service.closeSession(self.username, self.password, self.sessionId)"}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
+ Parameter(name = "Delay", valuetype = int, unitmanager = UM, dimension = timeinterval, default = (100, milli_second)),
+ Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)) ])
+
+class sonomashortchirp:
+ driver = SOAPClient
+ name = "SONoMAChirp"
+ domain = DOM_SUBSTRATE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('onewaydelay', [('Run', countable),
+ ('SequenceNumber', countable),
+ ('SourceAddress', ipaddress),
+ ('DestinationAddress', ipaddress),
+ ('TimestampSend', pointintime, nano_unixtimestamp),
+ ('OnewayDelay', timeinterval, nano_second) ])
+ authtype = (UsernamePassword, )
+ kwargs = { "url": sonoma_url, "MAserviceport": 11123 }
+ hooks = {
+ "prehook" : """
+from base64 import b64decode
+self.decode = b64decode
+self.pattern = re.compile('^(\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+)\s+(\d+)$')
+self.username=self.credential.username
+self.password=self.credential.password
+self.client = self.driver(kw.get('url'))
+self.sessionId = self.client.service.requestSession(self.username, self.password, 'CSV', False)
+self.port = kw.get('MAserviceport')
+self.template = self.data.getTemplate(size = 1)
+self.delaylist = self.client.factory.create("delayList")
+self.delaylist.gap = [100000,100000]
+ """,
+ "retrievehook" : """
+source = "%s:%d" % (self.parameters.get('SourceAddress', self.um.ipv4dotted), self.port)
+destination = "%s:%d" % (self.parameters.get('DestinationAddress', self.um.ipv4dotted), self.port)
+res = self.client.service.shortChirp(self.sessionId,
+ source, self.parameters.get('SourcePort', self.um.unitless),
+ destination, self.parameters.get('DestinationPort', self.um.unitless),
+ self.parameters.get('Count', self.um.piece), self.parameters.get('Delay', self.um.milli_second),
+ self.parameters.get('PacketSize', self.um.Byte), self.delaylist)
+rec = self.decode(res).splitlines()
+data = []
+for r in rec:
+ if self.pattern.match(r):
+ self.template.clear()
+ ex = self.pattern.split(r)[:-1]
+ ex[0] = self.runcount
+ ex[-1] = int(ex[-1])-int(ex[-2])
+ data.append( ex )
+self.template.clear(size = len(data))
+self.template.updateMany( ('Run', 'SequenceNumber', 'SourceAddress', 'DestinationAddress', 'TimestampSend', 'OnewayDelay'), data )
+self.data.saveRecord(self.template)
+return True
+ """,
+ "posthook": "self.client.service.closeSession(self.username, self.password, self.sessionId)"}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
+ Parameter(name = "Delay", valuetype = int, unitmanager = UM, dimension = timeinterval, default = (100, milli_second)),
+ Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)),
+ Parameter(name = "SourcePort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)),
+ Parameter(name = "DestinationPort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)), ])
+
+
+class sshping:
+ driver = SshExec
+ name = "sshPing"
+ domain = DOM_SLICE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('ping', [('Run', cardinal),
+ ('TimeReceived', pointintime),
+ ('PacketSize', informationsize),
+ ('DestinationAddress', ipaddress),
+ ('SequenceNumber', countable), ('TimeToLive', countable),
+ ('RoundTripDelay', timeinterval, milli_second)])
+ authtype = (UsernameRSAKey, UsernamePassword)
+ kwargs = {}
+ hooks = {
+ "prehook" : """
+self.pattern = re.compile('^\[(\d+\.?\d*)\]\s*(\d+)\s*bytes\s*from\s*(\d+\.\d+\.\d+\.\d+):\s*icmp_req=(\d+)\s*ttl=(\d+)\s*time=(\d+\.?\d*)\s*(\w*)')
+self.template = self.data.getTemplate(size = self.parameters.get('Count', self.um.piece))
+command = "ping -D -n -c %d -i %f -t %d -I %s %s" % (
+ self.parameters.get('Count', self.um.piece), self.parameters.get('Delay', self.um.second),
+ self.parameters.get('TimeToLive', self.um.piece), self.parameters.get('Interface', self.um.unitless),
+ self.parameters.get('DestinationAddress', self.um.ipv4dotted))
+self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
+ """,
+ "retrievehook" : """
+data = []
+for r in self.client.execute().readlines():
+ if self.pattern.match(r):
+ ex = self.pattern.split(r)[:-2]
+ ex[0] = self.runcount
+ data.append( ex )
+self.template.clear(size = len(data))
+self.template.updateMany( ('Run', 'TimeReceived', 'PacketSize', 'DestinationAddress', 'SequenceNumber', 'TimeToLive', 'RoundTripDelay'), data )
+self.data.saveRecord(self.template)
+return True
+ """}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
+ Parameter(name = "Delay", valuetype = float, unitmanager = UM, dimension = timeinterval, default = (200, milli_second)),
+ Parameter(name = "TimeToLive", valuetype = int, unitmanager = UM, dimension = countable, default = (32, piece)),
+ Parameter(name = "Interface", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = ("eth0", unitless)) ] )
+
+class sshmeminfo:
+ driver = SshExec
+ name = "sshMeminfo"
+ domain = DOM_SLICE | DOM_SUBSTRATE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('meminfo', [('Run', cardinal),
+ ('AvailableMemory', informationsize),
+ ('FreeMemory', informationsize)])
+ authtype = (UsernameRSAKey, UsernamePassword)
+ kwargs = {}
+ hooks = {
+ "prehook" : """
+self.pattern = re.compile('^(.*):\s*(\d+)\s+(.B)$')
+self.template = self.data.getTemplate(size = 1)
+command = "cat /proc/meminfo"
+self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
+ """,
+ "retrievehook" : """
+self.template.clear()
+self.template.update('Run', (self.runcount,))
+for r in self.client.execute().readlines():
+ if self.pattern.match(r):
+ n, v, u = self.pattern.split(r)[1:-1]
+ if n == 'MemTotal' and u == 'kB':
+ self.template.update('AvailableMemory', (v,))
+ elif n == 'MemFree' and u == 'kB':
+ self.template.update('FreeMemory', (v,))
+self.data.saveRecord(self.template)
+return True
+ """}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress), ])
+
+class sshdf:
+ driver = SshExec
+ name = "sshDiskinfo"
+ domain = DOM_SLICE | DOM_SUBSTRATE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('diskinfo', [('Run', cardinal),
+ ('Available', informationsize, kilo_Byte),
+ ('Used', informationsize, kilo_Byte)])
+ authtype = (UsernameRSAKey, UsernamePassword)
+ kwargs = {}
+ hooks = {
+ "prehook" : """
+self.pattern = re.compile('^.*\s+\d+\s+(\d+)\s+(\d+)\s+\d+%\s+.*$')
+self.template = self.data.getTemplate(size = 1)
+command = "df %s" % self.parameters.get('Directory', self.um.unitless)
+self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
+ """,
+ "retrievehook" : """
+self.template.clear()
+self.template.update('Run', (self.runcount,))
+for r in self.client.execute().readlines():
+ if self.pattern.match(r):
+ u, a = self.pattern.split(r)[1:-1]
+ self.template.update('Available', (a,))
+ self.template.update('Used', (u,))
+self.data.saveRecord(self.template)
+return True
+ """}
+ parameters = ParameterList([
+ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Directory", valuetype = str, unitmanager = UM, dimension = nameofsomething),
+ ])
+
+
+class sshtraceroute:
+ driver = SshExec
+ name = "sshTraceroute"
+ domain = DOM_SLICE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('traceroute', [('Run', cardinal),
+ ('Hop', countable),
+ ('Raw', nameofsomething)])
+ authtype = (UsernameRSAKey, UsernamePassword)
+ kwargs = {}
+ hooks = {
+ "prehook" : """
+self.pattern = re.compile('^\s*(\d+)\s+(.*)$')
+self.template = self.data.getTemplate(size = 1)
+command = "traceroute -n %s" % (self.parameters.get('DestinationAddress', self.um.ipv4dotted))
+self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
+ """,
+ "retrievehook" : """
+data = []
+for r in self.client.execute().readlines():
+ if self.pattern.match(r):
+ ex = self.pattern.split(r)[:-1]
+ ex[0] = self.runcount
+ data.append( ex )
+self.template.clear(size = len(data))
+self.template.updateMany( ('Run', 'Hop', 'Raw'), data )
+self.data.saveRecord(self.template)
+return True
+ """}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)), ])
+
+class sshhades:
+ driver = SshExec
+ name = "HADESaggregates"
+ domain = DOM_SUBSTRATE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('hadestable', [('Run', cardinal),
+ ('Time', pointintime, unixtimestamp),
+ ('MinDelay', timeinterval, second),
+ ('MedianDelay', timeinterval, second),
+ ('MaxDelay', timeinterval, second),
+ ('Loss', probability, fraction),
+ ])
+ authtype = (UsernameRSAKey, UsernamePassword)
+ kwargs = { 'repository': '194.132.52.212', 'samplecount': 9 }
+ hooks = {
+ "prehook" : """
+self.repository = kw.get('repository')
+self.pattern = re.compile('^(\d+)\s+(-?\d+\.?\d*)\s+(-?\d+\.?\d*)\s+(-?\d+\.?\d*)\s+(\d+)\s+.*$')
+self.template = self.data.getTemplate(size = 1)
+lookup = { '192.168.31.1': 'PSNC_FED', '192.168.31.5': 'DFN_FED', '192.168.31.9': 'GARR_FED' }
+root = "/home/novi-monitoring"
+source = lookup[ self.parameters.get('SourceAddress', self.um.ipv4dotted) ]
+destination = lookup[ self.parameters.get('DestinationAddress', self.um.ipv4dotted) ]
+lookupcommand = "echo %s/data/hades/novi/www/*/*/*/%s.%s.0.qos_ai.dat" % (root, source, destination)
+self.client = self.driver(host = self.repository, credential = self.credential)
+files = self.client.execute(lookupcommand).read().split()
+self.command = "%s/hades/bin/hades-show-data.pl --config=novi %s" % (root, files[-1])
+self.nsamples = int(kw.get('samplecount'))
+ """,
+ "retrievehook" : """
+data = []
+for r in self.client.execute(self.command).readlines():
+ print r
+ if self.pattern.match(r):
+ ts, dtmin, dtmed, dtmax, loss = self.pattern.split(r)[1:-1]
+ data.append( [ self.runcount, ts, dtmin, dtmed, dtmax, float(loss)/self.nsamples ] )
+self.template.clear(size = len(data))
+self.template.updateMany( ('Run', 'Time', 'MinDelay', 'MedianDelay', 'MaxDelay', 'Loss'), data )
+self.data.saveRecord(self.template)
+return True
+ """}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ ])
diff --git a/Monitoring/src/main/python/Example/Tools.py.old b/Monitoring/src/main/python/Example/Tools.py.old
new file mode 100644
index 0000000..d842d04
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Tools.py.old
@@ -0,0 +1,311 @@
+'''
+Created on Oct 12, 2011
+
+@author: steger
+@summary: Here we define some monitoring tools and dress them up with parameters and work flow description
+'''
+from DataProcessing.Parameter import Parameter, ParameterList
+from Example.Resources import PLdict
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from Driver.SOAPClient import SOAPClient
+from Driver.SshExec import SshExec
+from Example.Units import UM, Byte, micro_second, piece, milli_second,\
+ nano_second, unitless, nano_unixtimestamp, unixtimestamp, fraction,\
+ kilo_Byte, second
+from Example.Dimensions import cardinal, countable, ipaddress, timeinterval,\
+ informationsize, pointintime, nameofsomething, probability
+from DataProcessing.DataHeader import DataHeaderGeneratedByDescription
+
+DOM_SUBSTRATE = 1
+DOM_SLICE = 2
+
+sonoma_url = "http://complex.elte.hu/~steger/sonoma/user.wsdl"
+
+nodes = map(lambda x:(x.get_hostname("eth0"), unitless), PLdict.values())
+
+class sonomashortping:
+ driver = SOAPClient
+ name = "SONoMAPing"
+ domain = DOM_SUBSTRATE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('ping', [('Run', countable),
+ ('Sequence Number', countable),
+ ('Source Address', ipaddress),
+ ('Destination Address', ipaddress),
+ ('Packet Size', informationsize, Byte),
+ ('Time To Live', countable),
+ ('Round Trip Delay', timeinterval, micro_second)])
+
+ authtype = (UsernamePassword, )
+ kwargs = { "url": sonoma_url, "MAserviceport": 11123 }
+ hooks = {
+ "prehook" : """
+from base64 import b64decode
+self.decode = b64decode
+self.pattern = re.compile('^(\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+)\s+(\d+)\s+(\d+)$')
+self.username=self.credential.username
+self.password=self.credential.password
+self.client = self.driver(kw.get('url'))
+self.sessionId = self.client.service.requestSession(self.username, self.password, 'CSV', False)
+self.port = kw.get('MAserviceport')
+self.template = self.data.getTemplate(size = 1)
+ """,
+ "retrievehook" : """
+source = "%s:%d" % (self.parameters.get('SourceAddress', self.um.ipv4dotted), self.port)
+res = self.client.service.shortPing(self.sessionId,
+ source, self.parameters.get('DestinationAddress', self.um.ipv4dotted), self.parameters.get('Count', self.um.piece),
+ self.parameters.get('Delay', self.um.micro_second), self.parameters.get('PacketSize', self.um.Byte))
+rec = self.decode(res).splitlines()
+for r in rec:
+ if self.pattern.match(r):
+ self.template.clear()
+ ex = self.pattern.split(r)[:-1]
+ ex[0] = self.runcount
+ self.template.updateMany( ('Run', 'Sequence Number', 'Source Address', 'Destination Address', 'Packet Size', 'Time To Live', 'Round Trip Delay'), [ex,] )
+ self.data.saveRecord(self.template)
+return True
+ """,
+ "posthook": "self.client.service.closeSession(self.username, self.password, self.sessionId)"}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
+ Parameter(name = "Delay", valuetype = int, unitmanager = UM, dimension = timeinterval, default = (100, milli_second)),
+ Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)) ])
+
+class sonomashortchirp:
+ driver = SOAPClient
+ name = "SONoMAChirp"
+ domain = DOM_SUBSTRATE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('onewaydelay', [('Run', countable),
+ ('SequenceNumber', countable),
+ ('SourceAddress', ipaddress),
+ ('DestinationAddress', ipaddress),
+ ('TimestampSend', pointintime, nano_unixtimestamp),
+ ('OnewayDelay', timeinterval, nano_second) ])
+ authtype = (UsernamePassword, )
+ kwargs = { "url": sonoma_url, "MAserviceport": 11123 }
+ hooks = {
+ "prehook" : """
+from base64 import b64decode
+self.decode = b64decode
+self.pattern = re.compile('^(\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+\.\d+\.\d+\.\d+)\s+(\d+)\s+(\d+)$')
+self.username=self.credential.username
+self.password=self.credential.password
+self.client = self.driver(kw.get('url'))
+self.sessionId = self.client.service.requestSession(self.username, self.password, 'CSV', False)
+self.port = kw.get('MAserviceport')
+self.template = self.data.getTemplate(size = 1)
+self.delaylist = self.client.factory.create("delayList")
+self.delaylist.gap = [100000,100000]
+ """,
+ "retrievehook" : """
+source = "%s:%d" % (self.parameters.get('SourceAddress', self.um.ipv4dotted), self.port)
+destination = "%s:%d" % (self.parameters.get('DestinationAddress', self.um.ipv4dotted), self.port)
+res = self.client.service.shortChirp(self.sessionId,
+ source, self.parameters.get('SourcePort', self.um.unitless),
+ destination, self.parameters.get('DestinationPort', self.um.unitless),
+ self.parameters.get('Count', self.um.piece), self.parameters.get('Delay', self.um.milli_second),
+ self.parameters.get('PacketSize', self.um.Byte), self.delaylist)
+rec = self.decode(res).splitlines()
+data = []
+for r in rec:
+ if self.pattern.match(r):
+ self.template.clear()
+ ex = self.pattern.split(r)[:-1]
+ ex[0] = self.runcount
+ ex[-1] = int(ex[-1])-int(ex[-2])
+ data.append( ex )
+self.template.clear(size = len(data))
+self.template.updateMany( ('Run', 'SequenceNumber', 'SourceAddress', 'DestinationAddress', 'TimestampSend', 'OnewayDelay'), data )
+self.data.saveRecord(self.template)
+return True
+ """,
+ "posthook": "self.client.service.closeSession(self.username, self.password, self.sessionId)"}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
+ Parameter(name = "Delay", valuetype = int, unitmanager = UM, dimension = timeinterval, default = (100, milli_second)),
+ Parameter(name = "PacketSize", valuetype = int, unitmanager = UM, dimension = informationsize, default = (64, Byte)),
+ Parameter(name = "SourcePort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)),
+ Parameter(name = "DestinationPort", valuetype = int, unitmanager = UM, dimension = cardinal, default = (7777, unitless)), ])
+
+
+class sshping:
+ driver = SshExec
+ name = "sshPing"
+ domain = DOM_SLICE
+ dataheaderdeclaration = DataHeaderGeneratedByDescription('ping', [('Run', cardinal),
+ ('TimeReceived', pointintime),
+ ('PacketSize', informationsize),
+ ('DestinationAddress', ipaddress),
+ ('SequenceNumber', countable), ('TimeToLive', countable),
+ ('RoundTripDelay', timeinterval, milli_second)])
+ authtype = (UsernameRSAKey, UsernamePassword)
+ kwargs = {}
+ hooks = {
+ "prehook" : """
+self.pattern = re.compile('^\[(\d+\.?\d*)\]\s*(\d+)\s*bytes\s*from\s*(\d+\.\d+\.\d+\.\d+):\s*icmp_req=(\d+)\s*ttl=(\d+)\s*time=(\d+\.?\d*)\s*(\w*)')
+self.template = self.data.getTemplate(size = self.parameters.get('Count', self.um.piece))
+command = "ping -D -n -c %d -i %f -t %d -I %s %s" % (
+ self.parameters.get('Count', self.um.piece), self.parameters.get('Delay', self.um.second),
+ self.parameters.get('TimeToLive', self.um.piece), self.parameters.get('Interface', self.um.unitless),
+ self.parameters.get('DestinationAddress', self.um.ipv4dotted))
+self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
+ """,
+ "retrievehook" : """
+data = []
+for r in self.client.execute().readlines():
+ if self.pattern.match(r):
+ ex = self.pattern.split(r)[:-2]
+ ex[0] = self.runcount
+ data.append( ex )
+self.template.clear(size = len(data))
+self.template.updateMany( ('Run', 'TimeReceived', 'PacketSize', 'DestinationAddress', 'SequenceNumber', 'TimeToLive', 'RoundTripDelay'), data )
+self.data.saveRecord(self.template)
+return True
+ """}
+ parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
+ Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)),
+ Parameter(name = "Delay", valuetype = float, unitmanager = UM, dimension = timeinterval, default = (200, milli_second)),
+ Parameter(name = "TimeToLive", valuetype = int, unitmanager = UM, dimension = countable, default = (32, piece)),
+ Parameter(name = "Interface", valuetype = str, unitmanager = UM, dimension = nameofsomething, default = ("eth0", unitless)) ] )
+
class sshmeminfo:
    # Monitoring tool description: collect memory statistics of a node by
    # running "cat /proc/meminfo" over an ssh channel.
    # NOTE(review): the hook values are Python 2 source snippets exec'd by the
    # task framework with `self` bound to the running task -- they are runtime
    # strings and must not be edited as code.
    driver = SshExec                    # transport used to run the remote command
    name = "sshMeminfo"                 # public name of this tool
    domain = DOM_SLICE | DOM_SUBSTRATE  # usable in both monitoring domains
    # layout of the records produced by one measurement run
    dataheaderdeclaration = DataHeaderGeneratedByDescription('meminfo', [('Run', cardinal),
                                                                         ('AvailableMemory', informationsize),
                                                                         ('FreeMemory', informationsize)])
    authtype = (UsernameRSAKey, UsernamePassword)
    kwargs = {}
    hooks = {
        "prehook" : """
self.pattern = re.compile('^(.*):\s*(\d+)\s+(.B)$')
self.template = self.data.getTemplate(size = 1)
command = "cat /proc/meminfo"
self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
 """,
        "retrievehook" : """
self.template.clear()
self.template.update('Run', (self.runcount,))
for r in self.client.execute().readlines():
    if self.pattern.match(r):
        n, v, u = self.pattern.split(r)[1:-1]
        if n == 'MemTotal' and u == 'kB':
            self.template.update('AvailableMemory', (v,))
        elif n == 'MemFree' and u == 'kB':
            self.template.update('FreeMemory', (v,))
self.data.saveRecord(self.template)
return True
 """}
    parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress), ])
+
class sshdf:
    # Monitoring tool description: run "df <dir>" on a remote node over ssh
    # and record the Used and Available kilobyte columns of the matching
    # file system row.  Hook values are Python 2 snippets exec'd by the
    # task framework (runtime strings, not code of this module).
    driver = SshExec
    name = "sshDiskinfo"
    domain = DOM_SLICE | DOM_SUBSTRATE
    dataheaderdeclaration = DataHeaderGeneratedByDescription('diskinfo', [('Run', cardinal),
                                                                          ('Available', informationsize, kilo_Byte),
                                                                          ('Used', informationsize, kilo_Byte)])
    authtype = (UsernameRSAKey, UsernamePassword)
    kwargs = {}
    # the regex groups capture the Used and Available columns, in that order,
    # which is why the retrieve hook unpacks them as (u, a)
    hooks = {
        "prehook" : """
self.pattern = re.compile('^.*\s+\d+\s+(\d+)\s+(\d+)\s+\d+%\s+.*$')
self.template = self.data.getTemplate(size = 1)
command = "df %s" % self.parameters.get('Directory', self.um.unitless)
self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
 """,
        "retrievehook" : """
self.template.clear()
self.template.update('Run', (self.runcount,))
for r in self.client.execute().readlines():
    if self.pattern.match(r):
        u, a = self.pattern.split(r)[1:-1]
        self.template.update('Available', (a,))
        self.template.update('Used', (u,))
self.data.saveRecord(self.template)
return True
 """}
    parameters = ParameterList([
        Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
        Parameter(name = "Directory", valuetype = str, unitmanager = UM, dimension = nameofsomething),
        ])
+
+
class sshtraceroute:
    # Monitoring tool description: run "traceroute -n" on a remote node over
    # ssh and record one raw text row per hop.  Hook values are Python 2
    # snippets exec'd by the task framework (runtime strings, not code).
    driver = SshExec
    name = "sshTraceroute"
    domain = DOM_SLICE
    dataheaderdeclaration = DataHeaderGeneratedByDescription('traceroute', [('Run', cardinal),
                                                                            ('Hop', countable),
                                                                            ('Raw', nameofsomething)])
    authtype = (UsernameRSAKey, UsernamePassword)
    kwargs = {}
    # re.split on a fully anchored pattern yields ['', hop, rest, tail];
    # [:-1] drops the tail and the leading '' slot is overwritten with the
    # run counter, giving (Run, Hop, Raw) rows
    hooks = {
        "prehook" : """
self.pattern = re.compile('^\s*(\d+)\s+(.*)$')
self.template = self.data.getTemplate(size = 1)
command = "traceroute -n %s" % (self.parameters.get('DestinationAddress', self.um.ipv4dotted))
self.client = self.driver(host = self.parameters.get('SourceAddress', self.um.ipv4dotted), credential = self.credential, command = command)
 """,
        "retrievehook" : """
data = []
for r in self.client.execute().readlines():
    if self.pattern.match(r):
        ex = self.pattern.split(r)[:-1]
        ex[0] = self.runcount
        data.append( ex )
self.template.clear(size = len(data))
self.template.updateMany( ('Run', 'Hop', 'Raw'), data )
self.data.saveRecord(self.template)
return True
 """}
    # NOTE(review): "Count" is declared but never referenced by the hooks --
    # confirm whether it should be passed to the traceroute command
    parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
                                 Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
                                 Parameter(name = "Count", valuetype = int, unitmanager = UM, dimension = countable, default = (5, piece)), ])
+
class sshhades:
    # Monitoring tool description: fetch pre-computed HADES delay/loss
    # aggregates from a measurement repository host over ssh.  The prehook
    # first locates the newest data file for the requested source/destination
    # pair, then the retrieve hook parses the tool's tabular output.
    # Hook values are Python 2 snippets (note the `print r` statement)
    # exec'd by the task framework -- runtime strings, not code.
    driver = SshExec
    name = "HADESaggregates"
    domain = DOM_SUBSTRATE
    dataheaderdeclaration = DataHeaderGeneratedByDescription('hadestable', [('Run', cardinal),
                                                                            ('Time', pointintime, unixtimestamp),
                                                                            ('MinDelay', timeinterval, second),
                                                                            ('MedianDelay', timeinterval, second),
                                                                            ('MaxDelay', timeinterval, second),
                                                                            ('Loss', probability, fraction),
                                                                            ])
    authtype = (UsernameRSAKey, UsernamePassword)
    # repository: host storing the HADES data files;
    # samplecount: number of probes per interval, used to turn the raw loss
    # count into a fraction
    kwargs = { 'repository': '194.132.52.212', 'samplecount': 9 }
    hooks = {
        "prehook" : """
self.repository = kw.get('repository')
self.pattern = re.compile('^(\d+)\s+(-?\d+\.?\d*)\s+(-?\d+\.?\d*)\s+(-?\d+\.?\d*)\s+(\d+)\s+.*$')
self.template = self.data.getTemplate(size = 1)
lookup = { '192.168.31.1': 'PSNC_FED', '192.168.31.5': 'DFN_FED', '192.168.31.9': 'GARR_FED' }
root = "/home/novi-monitoring"
source = lookup[ self.parameters.get('SourceAddress', self.um.ipv4dotted) ]
destination = lookup[ self.parameters.get('DestinationAddress', self.um.ipv4dotted) ]
lookupcommand = "echo %s/data/hades/novi/www/*/*/*/%s.%s.0.qos_ai.dat" % (root, source, destination)
self.client = self.driver(host = self.repository, credential = self.credential)
files = self.client.execute(lookupcommand).read().split()
self.command = "%s/hades/bin/hades-show-data.pl --config=novi %s" % (root, files[-1])
self.nsamples = int(kw.get('samplecount'))
 """,
        "retrievehook" : """
data = []
for r in self.client.execute(self.command).readlines():
    print r
    if self.pattern.match(r):
        ts, dtmin, dtmed, dtmax, loss = self.pattern.split(r)[1:-1]
        data.append( [ self.runcount, ts, dtmin, dtmed, dtmax, float(loss)/self.nsamples ] )
self.template.clear(size = len(data))
self.template.updateMany( ('Run', 'Time', 'MinDelay', 'MedianDelay', 'MaxDelay', 'Loss'), data )
self.data.saveRecord(self.template)
return True
 """}
    # NOTE(review): only addresses present in the prehook's `lookup` table are
    # accepted; any other address raises KeyError inside the hook
    parameters = ParameterList([ Parameter(name = "SourceAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
                                 Parameter(name = "DestinationAddress", valuetype = str, unitmanager = UM, dimension = ipaddress),
                                 ])
diff --git a/Monitoring/src/main/python/Example/Units$py.class b/Monitoring/src/main/python/Example/Units$py.class
new file mode 100644
index 0000000..ce17646
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Units$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/Units.py b/Monitoring/src/main/python/Example/Units.py
new file mode 100644
index 0000000..c224448
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Units.py
@@ -0,0 +1,81 @@
+from __future__ import with_statement
+'''
+Created on Oct 12, 2011
+
+@author: steger
+@summary: Here we declare some unit models to enable parameter conversions
+'''
+from DataProcessing.Unit import UnitManager
+from Example.Prefixes import PM
+from DataProcessing.DataError import PrefixError
+
+UM = UnitManager()
+
def getPrefixBySymbol(symbol):
    '''
    @summary: find the unit prefix registered in the PrefixManager by its symbol
    @param symbol: the symbol of the unit prefix
    @type symbol: str
    @return: the unit prefix found
    @rtype: Prefix
    @raise PrefixError: Prefix with symbol not found
    '''
    candidates = [p for p in PM.prefixes.values() if p.symbol == symbol]
    if candidates:
        return candidates[0]
    raise PrefixError("Prefix with symbol %s not found" % symbol)
+
+
# (reference, symbol, prefixes) descriptors for the dimensionful and
# dimensionless base units; `prefixes` lists the SI prefix symbols for
# which scaled variants (e.g. milli_second) are to be generated
basicunits = [
    ("piece", "(1)", None),
    ("unitless", "", None),
    ("fraction", "", None),
    ("second", "s", ['m', 'mu', 'n', 'p']),
    ("unixtimestamp", "tss", ['n']),
    ("ipv4dotted", "", None),
    ("bit", "bit", ['k', 'M' ]),
    ]

# (reference, symbol, base-unit-reference, scale, prefixes) descriptors for
# units that are linear multiples of an already declared unit
lintransformedunits = [
    ("dozen", "(12)", "piece", 12, None),
    ("Byte", "B", "bit", 8, ['k', 'M' ]),
    ]
+
def storeprefixes(u, prefixes):
    '''
    @summary: register prefixed variants of unit u (e.g. milli_second) in UM
    @param u: the unit to derive prefixed variants of
    @param prefixes: iterable of prefix symbols, or None for no variants
    '''
    for symbol in (prefixes or []):
        prefix = getPrefixBySymbol(symbol)
        newreference = "%s_%s" % (prefix.reference, u.reference)
        newsymbol = "%s%s" % (prefix.symbol, u.symbol)
        UM.addLinearTransformedUnit(newreference, newsymbol, u, prefix.scale)
+
# instantiate the basic units and their prefixed variants
for reference, symbol, prefixes in basicunits:
    u = UM.newBasicUnit(reference, symbol)
    storeprefixes(u, prefixes)

# instantiate the linearly derived units and their prefixed variants
for reference, symbol, ancientref, scale, prefixes in lintransformedunits:
    u = UM.addLinearTransformedUnit(reference, symbol, UM[ancientref], scale)
    storeprefixes(u, prefixes)
+
+
# Some units explicitly referenced by name elsewhere in the code base;
# looking them up once here fails fast if the registrations above changed
pico_second = UM["pico_second"]
nano_second = UM["nano_second"]
micro_second = UM["micro_second"]
milli_second = UM["milli_second"]
second = UM["second"]

Byte = UM["Byte"]
kilo_Byte = UM["kilo_Byte"]

piece = UM["piece"]
dozen = UM["dozen"]

unitless = UM["unitless"]

unixtimestamp = UM["unixtimestamp"]
nano_unixtimestamp = UM["nano_unixtimestamp"]

fraction = UM["fraction"]
+
+ipv4dotted = UM["ipv4dotted"] \ No newline at end of file
diff --git a/Monitoring/src/main/python/Example/Units.py.old b/Monitoring/src/main/python/Example/Units.py.old
new file mode 100644
index 0000000..83ac386
--- /dev/null
+++ b/Monitoring/src/main/python/Example/Units.py.old
@@ -0,0 +1,80 @@
+'''
+Created on Oct 12, 2011
+
+@author: steger
+@summary: Here we declare some unit models to enable parameter conversions
+'''
+from DataProcessing.Unit import UnitManager
+from Example.Prefixes import PM
+from DataProcessing.DataError import PrefixError
+
+UM = UnitManager()
+
def getPrefixBySymbol(symbol):
    '''
    @summary: look up the prefix in the PrefixManager based on its symbol
    @param symbol: the symbol of the unit prefix
    @type symbol: str
    @return: the unit prefix found
    @rtype: Prefix
    @raise PrefixError: Prefix with symbol not found
    '''
    # linear scan; PM.prefixes is keyed by reference, not by symbol
    for prefix in PM.prefixes.values():
        if prefix.symbol == symbol:
            return prefix
    raise PrefixError("Prefix with symbol %s not found" % symbol)
+
+
# legacy (.py.old) copy of the unit tables -- kept alongside the Jython port.
# (reference, symbol, prefixes) descriptors of the base units
basicunits = [
    ("piece", "(1)", None),
    ("unitless", "", None),
    ("fraction", "", None),
    ("second", "s", ['m', 'mu', 'n', 'p']),
    ("unixtimestamp", "tss", ['n']),
    ("ipv4dotted", "", None),
    ("bit", "bit", ['k', 'M' ]),
    ]

# (reference, symbol, base-unit-reference, scale, prefixes) descriptors
lintransformedunits = [
    ("dozen", "(12)", "piece", 12, None),
    ("Byte", "B", "bit", 8, ['k', 'M' ]),
    ]

def storeprefixes(u, prefixes):
    # register prefixed variants (e.g. milli_second) of unit u in UM
    if prefixes:
        for ps in prefixes:
            p = getPrefixBySymbol(ps)
            nr = "%s_%s" % (p.reference, u.reference)
            ns = "%s%s" % (p.symbol, u.symbol)
            UM.addLinearTransformedUnit(nr, ns, u, p.scale)

# instantiate basic units, then the linearly derived ones
for reference, symbol, prefixes in basicunits:
    u = UM.newBasicUnit(reference, symbol)
    storeprefixes(u, prefixes)

for reference, symbol, ancientref, scale, prefixes in lintransformedunits:
    u = UM.addLinearTransformedUnit(reference, symbol, UM[ancientref], scale)
    storeprefixes(u, prefixes)
+
+
# Some units explicitly referenced by name elsewhere in the code base
pico_second = UM["pico_second"]
nano_second = UM["nano_second"]
micro_second = UM["micro_second"]
milli_second = UM["milli_second"]
second = UM["second"]

Byte = UM["Byte"]
kilo_Byte = UM["kilo_Byte"]

piece = UM["piece"]
dozen = UM["dozen"]

unitless = UM["unitless"]

unixtimestamp = UM["unixtimestamp"]
nano_unixtimestamp = UM["nano_unixtimestamp"]

fraction = UM["fraction"]
+
+ipv4dotted = UM["ipv4dotted"] \ No newline at end of file
diff --git a/Monitoring/src/main/python/Example/__init__$py.class b/Monitoring/src/main/python/Example/__init__$py.class
new file mode 100644
index 0000000..dda2498
--- /dev/null
+++ b/Monitoring/src/main/python/Example/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/__init__.py b/Monitoring/src/main/python/Example/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Example/__init__.py
diff --git a/Monitoring/src/main/python/Example/__init__.py.old b/Monitoring/src/main/python/Example/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Example/__init__.py.old
diff --git a/Monitoring/src/main/python/Example/credentials$py.class b/Monitoring/src/main/python/Example/credentials$py.class
new file mode 100644
index 0000000..137c562
--- /dev/null
+++ b/Monitoring/src/main/python/Example/credentials$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Example/credentials.py b/Monitoring/src/main/python/Example/credentials.py
new file mode 100644
index 0000000..0c167c4
--- /dev/null
+++ b/Monitoring/src/main/python/Example/credentials.py
@@ -0,0 +1,27 @@
+'''
+Created on Oct 27, 2011
+
+@author: steger
+'''
+from Credential.credentialtypes import UsernameRSAKey, UsernamePassword
+from os import path
+
# Username/RSA-key credentials for the NOVI testbeds.
# NOTE(review): key paths are hard coded relative to the current user's home.
noviCredentialIARGS = { 'username': 'novi_novi', 'rsakey': path.expanduser("~/Private/ssh/novi_rsa") }
noviCredential = UsernameRSAKey(**noviCredentialIARGS)

noviCredentialIARGS = { 'username': 'novi_novi', 'rsakey': path.expanduser("~/Private/ssh/novi_rsa") } if False else noviCredentialIARGS  # NOTE(review): remove -- see below
novisaCredentialIARGS = { 'username': 'root', 'rsakey': path.expanduser("~/Private/ssh/novi_rsa") }
novisaCredential = UsernameRSAKey(**novisaCredentialIARGS)

novihadesCredentialIARGS = { 'username': 'novi-monitoring', 'rsakey': path.expanduser("~/Private/ssh/novimonitoring_rsa") }
novihadesCredential = UsernameRSAKey(**novihadesCredentialIARGS)

# guest account of the SONoMA web service
sonomaCredentialIARGS = {'username': "guest", 'password': "guest"}
sonomaCredential = UsernamePassword(**sonomaCredentialIARGS)

#mykeyring = [ noviCredentialIARGS, sonomaCredentialIARGS, novihadesCredentialIARGS ]

# credential sets tried per testbed (PlanetLab Europe / federated testbed)
ple_credentials = [ novisaCredentialIARGS, sonomaCredentialIARGS ]
fed_credentials = [ novisaCredentialIARGS, novihadesCredentialIARGS ]

if __name__ == '__main__':
    pass
diff --git a/Monitoring/src/main/python/Example/credentials.py.old b/Monitoring/src/main/python/Example/credentials.py.old
new file mode 100644
index 0000000..0c167c4
--- /dev/null
+++ b/Monitoring/src/main/python/Example/credentials.py.old
@@ -0,0 +1,27 @@
+'''
+Created on Oct 27, 2011
+
+@author: steger
+'''
+from Credential.credentialtypes import UsernameRSAKey, UsernamePassword
+from os import path
+
# legacy (.py.old) copy of the credential definitions, kept alongside the
# Jython port.  Username/RSA-key credentials for the NOVI testbeds; key
# paths are hard coded relative to the current user's home.
noviCredentialIARGS = { 'username': 'novi_novi', 'rsakey': path.expanduser("~/Private/ssh/novi_rsa") }
noviCredential = UsernameRSAKey(**noviCredentialIARGS)

novisaCredentialIARGS = { 'username': 'root', 'rsakey': path.expanduser("~/Private/ssh/novi_rsa") }
novisaCredential = UsernameRSAKey(**novisaCredentialIARGS)

novihadesCredentialIARGS = { 'username': 'novi-monitoring', 'rsakey': path.expanduser("~/Private/ssh/novimonitoring_rsa") }
novihadesCredential = UsernameRSAKey(**novihadesCredentialIARGS)

# guest account of the SONoMA web service
sonomaCredentialIARGS = {'username': "guest", 'password': "guest"}
sonomaCredential = UsernamePassword(**sonomaCredentialIARGS)

#mykeyring = [ noviCredentialIARGS, sonomaCredentialIARGS, novihadesCredentialIARGS ]

# credential sets tried per testbed
ple_credentials = [ novisaCredentialIARGS, sonomaCredentialIARGS ]
fed_credentials = [ novisaCredentialIARGS, novihadesCredentialIARGS ]

if __name__ == '__main__':
    pass
diff --git a/Monitoring/src/main/python/Resource/__init__$py.class b/Monitoring/src/main/python/Resource/__init__$py.class
new file mode 100644
index 0000000..e516ea7
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Resource/__init__.py b/Monitoring/src/main/python/Resource/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/__init__.py
diff --git a/Monitoring/src/main/python/Resource/__init__.py.old b/Monitoring/src/main/python/Resource/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/__init__.py.old
diff --git a/Monitoring/src/main/python/Resource/interface$py.class b/Monitoring/src/main/python/Resource/interface$py.class
new file mode 100644
index 0000000..a4209ac
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/interface$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Resource/interface.py b/Monitoring/src/main/python/Resource/interface.py
new file mode 100644
index 0000000..e6f28d5
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/interface.py
@@ -0,0 +1,79 @@
+'''
+Created on Jul 11, 2012
+
+@author: steger
+'''
+from Resource.resource import resource
+
class interface(resource):
    '''
    A network interface of a node: name, address, traffic direction and
    public visibility.  Accessors are written in the _get/_set + property()
    form rather than decorators -- presumably for Jython compatibility
    (compare interface.py.old); keep that form.
    '''
    # direction bit mask values
    UNDEFINED = 0
    INGRESS = 1
    EGRESS = 2

    def __init__(self, name = None, resourceid = None):
        resource.__init__(self, name, resourceid)
        self._public = False                # True if reachable from outside
        self._direction = self.UNDEFINED    # INGRESS/EGRESS bit mask
        self._iface = None                  # interface name, e.g. "eth0"
        self._address = None                # address string or (address, unit) tuple
        self._hostname = None

    def setvalues(self, ifacename, address, ispublic = False, direction = 0, hostname = None):
        # convenience bulk setter routed through the individual properties
        self.interface = ifacename
        self.address = address
        self.ispublic = ispublic
        self.direction = direction
        self.hostname = hostname

    def _get_ispublic(self):
        # refuse to answer before the interface is sufficiently defined
        if not self._iface:
            raise Exception("No interface name defined yet for %s" % self.resourceid)
        if not self._address:
            raise Exception("No address defined yet for %s" % self.resourceid)
        return self._public
    def _set_ispublic(self, ispublic):
        # anything that is not a proper bool falls back to False
        if isinstance(ispublic, bool):
            self._public = ispublic
        else:
            self._public = False

    def _get_hostname(self):
        return self._hostname
    def _set_hostname(self, hostname):
        self._hostname = self.ipret(hostname)

    def _get_address(self):
        return self._address
    def _set_address(self, address):
        # tuples (address with unit) are stored as-is; others are normalized
        if isinstance(address, tuple):
            self._address = address
        else:
            self._address = self.ipret(address)

    def _get_interface(self):
        return self._iface
    def _set_interface(self, iface):
        self._iface = self.ipret(iface)

    def _get_direction(self):
        return self._direction
    def _set_direction(self, direction):
        # mask out anything beyond the known direction bits
        self._direction = direction & (self.INGRESS | self.EGRESS)

    def _get_isvalid(self):
        # an interface is usable once both address and name are present
        return self.address and self.interface

    direction = property(_get_direction,_set_direction,None)

    ispublic = property(_get_ispublic,_set_ispublic,None)

    isvalid = property(_get_isvalid,None,None)

    hostname = property(_get_hostname,_set_hostname,None)

    address = property(_get_address,_set_address,None)

    interface = property(_get_interface,_set_interface,None)
diff --git a/Monitoring/src/main/python/Resource/interface.py.old b/Monitoring/src/main/python/Resource/interface.py.old
new file mode 100644
index 0000000..db7b2d3
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/interface.py.old
@@ -0,0 +1,78 @@
+'''
+Created on Jul 11, 2012
+
+@author: steger
+'''
+from Resource.resource import resource
+
class interface(resource):
    '''
    Legacy (.py.old) decorator-based version of the interface resource;
    superseded by the property()-based port in interface.py.
    '''
    # direction bit mask values
    UNDEFINED = 0
    INGRESS = 1
    EGRESS = 2

    def __init__(self, name = None, resourceid = None):
        resource.__init__(self, name, resourceid)
        self._public = False
        self._direction = self.UNDEFINED
        self._iface = None
        self._address = None
        self._hostname = None

    def setvalues(self, ifacename, address, ispublic = False, direction = 0, hostname = None):
        # convenience bulk setter routed through the individual properties
        self.interface = ifacename
        self.address = address
        self.ispublic = ispublic
        self.direction = direction
        self.hostname = hostname

    @property
    def ispublic(self):
        # refuse to answer before the interface is sufficiently defined
        if not self._iface:
            raise Exception("No interface name defined yet for %s" % self.resourceid)
        if not self._address:
            raise Exception("No address defined yet for %s" % self.resourceid)
        return self._public
    @ispublic.setter
    def ispublic(self, ispublic):
        if isinstance(ispublic, bool):
            self._public = ispublic
        else:
            self._public = False

    @property
    def hostname(self):
        return self._hostname
    @hostname.setter
    def hostname(self, hostname):
        self._hostname = self.ipret(hostname)

    @property
    def address(self):
        return self._address
    @address.setter
    def address(self, address):
        # tuples (address with unit) are stored as-is; others are normalized
        if isinstance(address, tuple):
            self._address = address
        else:
            self._address = self.ipret(address)

    @property
    def interface(self):
        return self._iface
    @interface.setter
    def interface(self, iface):
        self._iface = self.ipret(iface)

    @property
    def direction(self):
        return self._direction
    @direction.setter
    def direction(self, direction):
        # mask out anything beyond the known direction bits
        self._direction = direction & (self.INGRESS | self.EGRESS)

    @property
    def isvalid(self):
        return self.address and self.interface
diff --git a/Monitoring/src/main/python/Resource/link$py.class b/Monitoring/src/main/python/Resource/link$py.class
new file mode 100644
index 0000000..c11e052
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/link$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Resource/link.py b/Monitoring/src/main/python/Resource/link.py
new file mode 100644
index 0000000..4204442
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/link.py
@@ -0,0 +1,33 @@
+'''
+Created on May 31, 2012
+
+@author: steger
+'''
+from Resource.resource import resource
+from Resource.node import node
+from Resource.interface import interface
+
class link(resource):
    '''
    A connection between two end points.
    NOTE(review): end points are validated as interface instances here,
    while link.py.old validated node instances (see the #laki markers) --
    confirm that all callers now pass interfaces.
    '''
    def __init__(self, name = None, resourceid = None, source = None, destination = None):
        resource.__init__(self, name, resourceid)
        self._source = source
        self._destination = destination

    def _get_source(self):
        return self._source
    def _set_source(self, source):
        # silently ignores values that are not interface instances
        if isinstance(source, interface): #laki
            self._source = source
    def _del_source(self):
        self._source = None

    def _get_destination(self):
        return self._destination
    def _set_destination(self, destination):
        # silently ignores values that are not interface instances
        if isinstance(destination, interface): #laki
            self._destination = destination
    def _del_destination(self):
        self._destination = None

    source = property(_get_source,_set_source,_del_source)

    destination = property(_get_destination,_set_destination,_del_destination)
diff --git a/Monitoring/src/main/python/Resource/link.py.old b/Monitoring/src/main/python/Resource/link.py.old
new file mode 100644
index 0000000..6e32dd9
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/link.py.old
@@ -0,0 +1,35 @@
+'''
+Created on May 31, 2012
+
+@author: steger
+'''
+from Resource.resource import resource
+from Resource.node import node
+
class link(resource):
    '''
    Legacy (.py.old) decorator-based link resource: a connection between
    two node end points.  Superseded by link.py, where the end points are
    interface instances instead of nodes.
    '''
    def __init__(self, name = None, resourceid = None, source = None, destination = None):
        resource.__init__(self, name, resourceid)
        self._source = source
        self._destination = destination

    @property
    def source(self):
        return self._source
    @source.setter
    def source(self, source):
        # silently ignores values that are not node instances
        if isinstance(source, node):
            self._source = source
    @source.deleter
    def source(self):
        self._source = None

    @property
    def destination(self):
        return self._destination
    @destination.setter
    def destination(self, destination):
        # silently ignores values that are not node instances
        if isinstance(destination, node):
            self._destination = destination
    @destination.deleter
    def destination(self):
        self._destination = None
diff --git a/Monitoring/src/main/python/Resource/node$py.class b/Monitoring/src/main/python/Resource/node$py.class
new file mode 100644
index 0000000..373fc57
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/node$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Resource/node.py b/Monitoring/src/main/python/Resource/node.py
new file mode 100644
index 0000000..fe2fc38
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/node.py
@@ -0,0 +1,49 @@
+'''
+Created on May 31, 2012
+
+@author: steger
+'''
+from Resource.resource import resource
+from Resource.interface import interface
+
class node(resource):
    '''
    A node resource aggregating its interfaces.  A node counts as public
    as soon as any registered interface is public.  Written in Python 2 /
    Jython style (print statement, dict.itervalues, property() form).
    '''
    def __init__(self, name = None, resourceid = None):
        resource.__init__(self, name, resourceid)
        self._public = False
        self._interfaces = {}   # interface name -> interface instance

    def _get_ispublic(self):
        # undefined until at least one interface has been added
        if not len(self._interfaces):
            raise Exception("No interfaces defined yet for %s" % self.resourceid)
        return self._public

    def addinterface(self, iface):
        '''Register an interface; raises if iface is not an interface.'''
        if not isinstance(iface, interface):
            raise Exception("Wrong resource type %s is not an interface" % iface)
        self._interfaces[iface.interface] = iface
        # a single public interface makes the whole node public
        self._public |= iface.ispublic

    def interfaces(self):
        '''Yield (name, address, ispublic, hostname, direction) per valid interface.'''
        for iface in self._interfaces.itervalues():
            if not iface.isvalid:
                # best effort: warn and skip half-configured interfaces
                print "WW: invalid interface:", iface.resourceid
                continue
            yield iface.interface, iface.address, iface.ispublic, iface.hostname, iface.direction

    def get_ipaddress(self, interfacename):
        '''Address of the named interface; raises if it is missing/invalid.'''
        for ifname, address, _, _, _ in self.interfaces():
            if ifname == interfacename:
                return address
        raise Exception("%s has no interface %s" % (self.resourceid, interfacename))

    def get_hostname(self, interfacename):
        '''Hostname of the named interface, falling back to its address.'''
        for ifname, address, _, hostname, _ in self.interfaces():
            if ifname != interfacename:
                continue
            if hostname:
                return hostname
            else:
                return address
        raise Exception("%s has no interface %s" % (self.resourceid, interfacename))

    ispublic = property(_get_ispublic,None,None)
diff --git a/Monitoring/src/main/python/Resource/node.py.old b/Monitoring/src/main/python/Resource/node.py.old
new file mode 100644
index 0000000..5a65121
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/node.py.old
@@ -0,0 +1,48 @@
+'''
+Created on May 31, 2012
+
+@author: steger
+'''
+from Resource.resource import resource
+from Resource.interface import interface
+
class node(resource):
    '''
    Legacy (.py.old) decorator-based node resource; superseded by the
    property()-based port in node.py.  Same behavior: aggregates
    interfaces and is public once any interface is public.
    '''
    def __init__(self, name = None, resourceid = None):
        resource.__init__(self, name, resourceid)
        self._public = False
        self._interfaces = {}   # interface name -> interface instance

    @property
    def ispublic(self):
        if not len(self._interfaces):
            raise Exception("No interfaces defined yet for %s" % self.resourceid)
        return self._public

    def addinterface(self, iface):
        '''Register an interface; raises if iface is not an interface.'''
        if not isinstance(iface, interface):
            raise Exception("Wrong resource type %s is not an interface" % iface)
        self._interfaces[iface.interface] = iface
        self._public |= iface.ispublic

    def interfaces(self):
        '''Yield (name, address, ispublic, hostname, direction) per valid interface.'''
        for iface in self._interfaces.itervalues():
            if not iface.isvalid:
                print "WW: invalid interface:", iface.resourceid
                continue
            yield iface.interface, iface.address, iface.ispublic, iface.hostname, iface.direction

    def get_ipaddress(self, interfacename):
        '''Address of the named interface; raises if it is missing/invalid.'''
        for ifname, address, _, _, _ in self.interfaces():
            if ifname == interfacename:
                return address
        raise Exception("%s has no interface %s" % (self.resourceid, interfacename))

    def get_hostname(self, interfacename):
        '''Hostname of the named interface, falling back to its address.'''
        for ifname, address, _, hostname, _ in self.interfaces():
            if ifname != interfacename:
                continue
            if hostname:
                return hostname
            else:
                return address
        raise Exception("%s has no interface %s" % (self.resourceid, interfacename))
diff --git a/Monitoring/src/main/python/Resource/path$py.class b/Monitoring/src/main/python/Resource/path$py.class
new file mode 100644
index 0000000..a575bba
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/path$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Resource/path.py b/Monitoring/src/main/python/Resource/path.py
new file mode 100644
index 0000000..6296bc4
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/path.py
@@ -0,0 +1,9 @@
+'''
+Created on Jun 12, 2012
+
+@author: steger
+'''
+from Resource.link import link
+
class path(link):
    '''A network path, modelled simply as a link between its end points.'''
    pass
diff --git a/Monitoring/src/main/python/Resource/path.py.old b/Monitoring/src/main/python/Resource/path.py.old
new file mode 100644
index 0000000..6296bc4
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/path.py.old
@@ -0,0 +1,9 @@
+'''
+Created on Jun 12, 2012
+
+@author: steger
+'''
+from Resource.link import link
+
class path(link):
    '''Legacy (.py.old) copy: a path modelled simply as a link.'''
    pass
diff --git a/Monitoring/src/main/python/Resource/resource$py.class b/Monitoring/src/main/python/Resource/resource$py.class
new file mode 100644
index 0000000..9bb5d7a
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/resource$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Resource/resource.py b/Monitoring/src/main/python/Resource/resource.py
new file mode 100644
index 0000000..e55e104
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/resource.py
@@ -0,0 +1,41 @@
+'''
+Created on May 31, 2012
+
+@author: steger
+'''
+
class resource(object):
    '''
    Base class of all resources: a human readable name plus a resource
    identifier, both exposed as properties.  The property() call form
    (instead of decorators) is kept deliberately, matching the Jython
    port style of the sibling modules.
    '''
    def __init__(self, name = None, resourceid = None):
        self._name = name
        self._resourceid = resourceid

    @staticmethod
    def ipret(x):
        '''Coerce x to str; false-ish input or an empty result yields None.'''
        text = str(x) if x else ""
        return text if text else None

    def _read_name(self):
        '''Return the name; raise if it was never set.'''
        if self._name is None:
            raise Exception("resource name is not set")
        return self._name
    def _write_name(self, name):
        self._name = self.ipret(name)
    def _clear_name(self):
        self._name = None

    def _read_resourceid(self):
        '''Return the identifier; raise if it was never set.'''
        if self._resourceid is None:
            raise Exception("resource id is not set")
        return self._resourceid
    def _write_resourceid(self, resourceid):
        self._resourceid = resourceid
    def _clear_resourceid(self):
        self._resourceid = None

    resourceid = property(_read_resourceid, _write_resourceid, _clear_resourceid)

    name = property(_read_name, _write_name, _clear_name)
diff --git a/Monitoring/src/main/python/Resource/resource.py.old b/Monitoring/src/main/python/Resource/resource.py.old
new file mode 100644
index 0000000..fb093bd
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/resource.py.old
@@ -0,0 +1,44 @@
+'''
+Created on May 31, 2012
+
+@author: steger
+'''
+
class resource(object):
    '''
    Legacy (.py.old) decorator-based resource base class; superseded by
    the property()-based port in resource.py.
    '''
    def __init__(self, name = None, resourceid = None):
        self._name = name
        self._resourceid = resourceid

    @staticmethod
    def ipret(x):
        '''Coerce x to str; false-ish input or an empty result yields None.'''
        if not x:
            return None
        x = str(x)
        if len(x):
            return x
        else:
            return None

    @property
    def name(self):
        # the name has no sensible default: reading an unset name is an error
        if self._name is None:
            raise Exception("resource name is not set")
        return self._name
    @name.setter
    def name(self, name):
        self._name = self.ipret(name)
    @name.deleter
    def name(self):
        self._name = None

    @property
    def resourceid(self):
        if self._resourceid is None:
            raise Exception("resource id is not set")
        return self._resourceid
    @resourceid.setter
    def resourceid(self, resourceid):
        self._resourceid = resourceid
    @resourceid.deleter
    def resourceid(self):
        self._resourceid = None
diff --git a/Monitoring/src/main/python/Resource/slice.py b/Monitoring/src/main/python/Resource/slice.py
new file mode 100644
index 0000000..a66a93d
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/slice.py
@@ -0,0 +1,37 @@
+'''
+Created on Oct 30, 2012
+
+@author: steger
+'''
+
class slice_pointer(object):
    '''
    Lightweight reference to a slice: an identifier plus a display name.
    The property() call form (instead of decorators) matches the Jython
    port style of the sibling modules.
    '''

    def __init__(self, sliceid = None, slicename = ""):
        '''
        @param sliceid: identifier of the slice
        @param slicename: human readable name of the slice
        '''
        self._sliceid = sliceid
        self._name = slicename

    def _read_sliceid(self):
        # the id has no sensible default: reading an unset id is an error
        if self._sliceid is None:
            raise Exception("slice id is not set")
        return self._sliceid
    def _write_sliceid(self, sliceid):
        self._sliceid = sliceid
    def _clear_sliceid(self):
        self._sliceid = None

    def _read_name(self):
        return self._name
    def _write_name(self, name):
        self._name = name
    def _clear_name(self):
        # deleting the name restores the empty-string default
        self._name = ""

    sliceid = property(_read_sliceid, _write_sliceid, _clear_sliceid)

    name = property(_read_name, _write_name, _clear_name)
diff --git a/Monitoring/src/main/python/Resource/slice.py.old b/Monitoring/src/main/python/Resource/slice.py.old
new file mode 100644
index 0000000..002318f
--- /dev/null
+++ b/Monitoring/src/main/python/Resource/slice.py.old
@@ -0,0 +1,40 @@
+'''
+Created on Oct 30, 2012
+
+@author: steger
+'''
+
class slice_pointer(object):
    '''
    Legacy (.py.old) decorator-based slice reference (identifier plus
    display name); superseded by the property()-based port in slice.py.
    '''

    def __init__(self, sliceid = None, slicename = ""):
        '''
        @param sliceid: identifier of the slice
        @param slicename: human readable name of the slice
        '''
        self._sliceid = sliceid
        self._name = slicename

    @property
    def sliceid(self):
        # the id has no sensible default: reading an unset id is an error
        if self._sliceid is None:
            raise Exception("slice id is not set")
        return self._sliceid
    @sliceid.setter
    def sliceid(self, sliceid):
        self._sliceid = sliceid
    @sliceid.deleter
    def sliceid(self):
        self._sliceid = None

    @property
    def name(self):
        return self._name
    @name.setter
    def name(self, name):
        self._name = name
    @name.deleter
    def name(self):
        # deleting the name restores the empty-string default
        self._name = ""
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Semantics/InformationModel$py.class b/Monitoring/src/main/python/Semantics/InformationModel$py.class
new file mode 100644
index 0000000..ab59485
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/InformationModel$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/InformationModel.py b/Monitoring/src/main/python/Semantics/InformationModel.py
new file mode 100644
index 0000000..40aa44d
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/InformationModel.py
@@ -0,0 +1,88 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from urllib2 import URLError
+from rdflib import Graph, Namespace, URIRef, plugin
+from rdflib.query import Processor, Result
+import pkgutil
+import StringIO
+import monitoringmodel.im
+import os.path
+
+class IMError(Exception):
+ pass
+
+class Ontology(object):
+    '''
+    @summary: loads the NOVI monitoring OWL models into a single rdflib Graph
+    and offers small helpers to query the graph and resolve namespace prefixes
+    '''
+    # prefix -> (owl file name to load, or None when only the namespace is
+    # bound without loading a file; namespace URI)
+    ontology = {
+        'owl': (None, "http://www.w3.org/2002/07/owl#"),
+        'unit': ('unit.owl', "http://fp7-novi.eu/unit.owl#"),
+        'param': ('monitoring_parameters.owl', "http://fp7-novi.eu/monitoring_parameter.owl#"),
+        'feature': ('monitoring_features.owl', "http://fp7-novi.eu/monitoring_features.owl#"),
+        'task': ('monitoring_task.owl', "http://fp7-novi.eu/monitoring_task.owl#"),
+        'query': (None, "http://fp7-novi.eu/monitoring_query.owl#"), #('monitoring_query.owl', ...)
+        'conf': (None, "http://fp7-novi.eu/config.owl#"),
+        'stat': (None, 'http://fp7-novi.eu/monitoring_stat.owl#'),
+        'core': ('novi-im.owl', "http://fp7-novi.eu/im.owl#"),
+    }
+
+    def __init__(self, baseurl, config_owl):
+        '''
+        @summary: constructor; parses config_owl plus every owl file listed in
+        self.ontology into self.graph and binds the namespace prefixes
+        @param baseurl: ignored here; the actual path is taken from
+        monitoringmodel.im (see the Jython hack below)
+        @param config_owl: file name of the configuration model to load first
+        @raise IMError: when a model file cannot be read
+        '''
+
+        # register the rdfextras SPARQL plugins so Graph.query works
+        plugin.register(
+            'sparql', Processor,
+            'rdfextras.sparql.processor', 'Processor')
+        plugin.register(
+            'sparql', Result,
+            'rdfextras.sparql.query', 'SPARQLQueryResult')
+
+        # JYTHON hack for accessing owl files: read the model files through
+        # the package loader instead of plain filesystem/URL access
+        im = monitoringmodel.im.im()
+        path = im.path
+        loader = pkgutil.get_loader("monitoringmodel.im")
+
+        self.baseurl = path #baseurl
+        self.graph = Graph()
+        # load owl files and bind name spaces
+        try:
+            url = os.path.join(path, config_owl)
+            self.graph += Graph().parse(source = StringIO.StringIO(loader.get_data(url)) )
+        except URLError:
+            raise IMError("URLError: Cannot read model %s" % config_owl)
+        for prefix, (owl, ns) in self.ontology.iteritems():
+            if owl:
+                url = os.path.join(path, owl) #"%s/%s" % (self.baseurl, owl)
+                try:
+                    self.graph += Graph().parse(source = StringIO.StringIO(loader.get_data(url)) )
+                except URLError:
+                    raise IMError("URLError: Cannot read model %s" % url)
+            try:
+                self.graph.bind(prefix, Namespace(ns))
+            except:
+                # NOTE(review): bare except silently ignores any bind failure;
+                # consider narrowing to the expected exception type
+                pass
+
+    @staticmethod
+    def _float(f):
+        # NOTE(review): despite the name, a string without '.', 'e' or 'E'
+        # is converted to int, not float
+        if '.' in f or 'e' in f or 'E' in f:
+            return float(f)
+        else:
+            return int(f)
+
+    @staticmethod
+    def _tail(uriref):
+        # @summary: return the fragment after the last '#' of a URIRef
+        # @raise IMError: when the argument is not a URIRef
+        if not isinstance(uriref, URIRef):
+            raise IMError("Wrong uriref %s" % uriref)
+        return str(uriref).split("#")[-1]
+
+    def query(self, query):
+        # run a SPARQL query with all bound prefixes as initial namespaces
+        return self.graph.query(query, initNs = dict(self.graph.namespaces()))
+
+    def ns(self, prefix):
+        # @summary: resolve a bound prefix to its Namespace
+        # @raise IMError: when the prefix is not bound in the graph
+        for p, ns in self.graph.namespaces():
+            if p == prefix:
+                return Namespace(ns)
+        raise IMError("Unknown prefix: %s" % prefix)
+
+    def dump(self):
+        # debug helper: print every triple of the graph (Python 2 print)
+        for t in self.graph.triples((None, None, None)):
+            print t
diff --git a/Monitoring/src/main/python/Semantics/InformationModel.py.old b/Monitoring/src/main/python/Semantics/InformationModel.py.old
new file mode 100644
index 0000000..9de648e
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/InformationModel.py.old
@@ -0,0 +1,79 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from urllib2 import URLError
+from rdflib import Graph, Namespace, URIRef, plugin
+from rdflib.query import Processor, Result
+
+class IMError(Exception):
+ pass
+
+class Ontology(object):
+    '''
+    @summary: archival (.py.old) variant; loads the NOVI monitoring OWL models
+    into a single rdflib Graph directly from baseurl (no Jython loader hack)
+    '''
+    # prefix -> (owl file name to load, or None when only the namespace is
+    # bound without loading a file; namespace URI)
+    ontology = {
+        'owl': (None, "http://www.w3.org/2002/07/owl#"),
+        'unit': ('unit.owl', "http://fp7-novi.eu/unit.owl#"),
+        'param': ('monitoring_parameters.owl', "http://fp7-novi.eu/monitoring_parameter.owl#"),
+        'feature': ('monitoring_features.owl', "http://fp7-novi.eu/monitoring_features.owl#"),
+        'task': ('monitoring_task.owl', "http://fp7-novi.eu/monitoring_task.owl#"),
+        'query': (None, "http://fp7-novi.eu/monitoring_query.owl#"), #('monitoring_query.owl', ...)
+        'conf': (None, "http://fp7-novi.eu/config.owl#"),
+        'stat': (None, 'http://fp7-novi.eu/monitoring_stat.owl#'),
+        'core': ('novi-im.owl', "http://fp7-novi.eu/im.owl#"),
+    }
+
+    def __init__(self, baseurl, config_owl):
+        '''
+        @param baseurl: base location the owl files are fetched from
+        @param config_owl: source of the configuration model loaded first
+        @raise IMError: when a model cannot be read
+        '''
+
+        # register the rdfextras SPARQL plugins so Graph.query works
+        plugin.register(
+            'sparql', Processor,
+            'rdfextras.sparql.processor', 'Processor')
+        plugin.register(
+            'sparql', Result,
+            'rdfextras.sparql.query', 'SPARQLQueryResult')
+
+
+        self.baseurl = baseurl
+        self.graph = Graph()
+        # load owl files and bind name spaces
+        try:
+            self.graph += Graph().parse(source = config_owl)
+        except URLError:
+            raise IMError("URLError: Cannot read model %s" % config_owl)
+        for prefix, (owl, ns) in self.ontology.iteritems():
+            if owl:
+                url = "%s/%s" % (self.baseurl, owl)
+                try:
+                    self.graph += Graph().parse(source = url)
+                except URLError:
+                    raise IMError("URLError: Cannot read model %s" % url)
+            try:
+                self.graph.bind(prefix, Namespace(ns))
+            except:
+                # NOTE(review): bare except hides bind failures
+                pass
+
+    @staticmethod
+    def _float(f):
+        # NOTE(review): returns int (not float) for strings without '.'/'e'/'E'
+        if '.' in f or 'e' in f or 'E' in f:
+            return float(f)
+        else:
+            return int(f)
+
+    @staticmethod
+    def _tail(uriref):
+        # fragment after the last '#'; raises IMError for non-URIRef input
+        if not isinstance(uriref, URIRef):
+            raise IMError("Wrong uriref %s" % uriref)
+        return str(uriref).split("#")[-1]
+
+    def query(self, query):
+        # run a SPARQL query with all bound prefixes as initial namespaces
+        return self.graph.query(query, initNs = dict(self.graph.namespaces()))
+
+    def ns(self, prefix):
+        # resolve a bound prefix to its Namespace or raise IMError
+        for p, ns in self.graph.namespaces():
+            if p == prefix:
+                return Namespace(ns)
+        raise IMError("Unknown prefix: %s" % prefix)
+
+    def dump(self):
+        # debug helper: print every triple of the graph (Python 2 print)
+        for t in self.graph.triples((None, None, None)):
+            print t
diff --git a/Monitoring/src/main/python/Semantics/Query$py.class b/Monitoring/src/main/python/Semantics/Query$py.class
new file mode 100644
index 0000000..7749289
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/Query$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/Query.py b/Monitoring/src/main/python/Semantics/Query.py
new file mode 100644
index 0000000..93c3a2c
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/Query.py
@@ -0,0 +1,140 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+from DataProcessing.Parameter import ParameterList
+from Resource.resource import resource as coreresource
+from DataProcessing.DataFormatter import JsonFormatter, DumbFormatter
+
+class SingleQuery(object):
+    '''
+    @summary: represents a (feature, resource) pair, representing what and where to measure
+    The requested output format is also stored here
+    Optionally some measurement specific parameters can be added and post processing can be applied
+    '''
+    def __init__(self):
+        self._feature = None
+        self._resource = None
+        self._samplechain = None
+        self._formatter = None
+        self._parameters = ParameterList()
+
+    def _get_resource(self):
+        # returns the (resourceid, resource) pair or None if unset
+        return self._resource
+    # NOTE(review): tuple parameter unpacking in the signature is
+    # Python 2 only syntax (removed in Python 3)
+    def _set_resource(self, (resourceid, resource)):
+        if not isinstance(resource, coreresource):
+            raise Exception("%s is not a resource type" % resource)
+        self._resource = (resourceid, resource)
+
+    def _get_feature(self):
+        return self._feature
+    def _set_feature(self, feature):
+        self._feature = feature
+
+    def _get_samplechain(self):
+        return self._samplechain
+    def _set_samplechain(self, samplechain):
+        self._samplechain = samplechain
+
+    def _get_formatter(self):
+        return self._formatter
+    def _set_formatter(self, uri_formatter):
+        # map the formatter URI to a formatter class by its URI suffix
+        if str(uri_formatter).endswith("Formatter_JSON"):
+            self._formatter = JsonFormatter
+        elif str(uri_formatter).endswith("Formatter_CSV"):
+            self._formatter = DumbFormatter
+        else:
+            raise Exception("%s is not a formatter type" % uri_formatter)
+
+    def _get_paramlist(self):
+        return self._parameters
+
+    def addParameter(self, parameter):
+        # append a measurement specific parameter to the internal ParameterList
+        self._parameters.append(parameter)
+
+
+    samplechain = property(_get_samplechain,_set_samplechain,None)
+
+    formatter = property(_get_formatter,_set_formatter,None)
+
+    resource = property(_get_resource,_set_resource,None)
+
+    feature = property(_get_feature,_set_feature,None)
+
+    # read-only view of the parameters
+    paramlist = property(_get_paramlist,None,None)
+class QueryBundle(object):
+    '''
+    @summary: represents a collection of SingleQueries
+    '''
+    def __init__(self):
+        # reference -> SingleQuery
+        self.atoms = {}
+
+    def __len__(self):
+        return len(self.atoms)
+
+    def has_key(self, key):
+        # Python 2 style membership test over the stored references
+        return self.atoms.has_key(key)
+
+    def __iter__(self):
+        # iterate over the stored SingleQuery values (not the references)
+        for q in self.atoms.itervalues():
+            yield q
+
+    def getResource(self, resourceid):
+        # @summary: look up an already translated resource by its id
+        # @return: the resource object, or None when no query refers to it
+        for q in self:
+            if q.resource[0] == resourceid:
+                return q.resource[1]
+        return None
+
+    def add(self, reference, q):
+        # @summary: register a SingleQuery under a unique reference
+        # @raise Exception: on duplicate reference or wrong value type
+        if self.atoms.has_key(reference):
+            raise Exception("Duplicate MonitoringQuery entry")
+        if not isinstance(q, SingleQuery):
+            raise Exception("Wrong type")
+        self.atoms[reference] = q
+
+
+
+
+
+#class BundleQueryBundle(QueryBundle):
+# def newQuery(self, key, feature, samplechain, resource, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = MonitorQuery()
+# Q.resource = resource
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
+
+# def addParameter(self, key, parameter):
+# if not self.atoms.has_key(key):
+# raise Exception("Atomic query %s does not exist" % key)
+# self.atoms[key].addParameter(parameter)
+
+
+#class AggregatorQuery(Query):
+# def __init__(self):
+# Query.__init__(self)
+# self._processid = None
+
+# def _get_processid(self):
+# return self._processid
+# def _set_processid(self, processid):
+# self._processid = processid
+
+
+
+#class SampleQuery(QueryBundle):
+# def newQuery(self, key, processid, feature, samplechain, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = AggregatorQuery()
+# Q.processid = processid
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
+
+#processid = property(_get_processid,_set_processid,None)
diff --git a/Monitoring/src/main/python/Semantics/Query.py.old b/Monitoring/src/main/python/Semantics/Query.py.old
new file mode 100644
index 0000000..80e7e41
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/Query.py.old
@@ -0,0 +1,139 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+from DataProcessing.Parameter import ParameterList
+from Resource.resource import resource as coreresource
+from DataProcessing.DataFormatter import JsonFormatter, DumbFormatter
+
+class SingleQuery(object):
+    '''
+    @summary: represents a (feature, resource) pair, representing what and where to measure
+    The requested output format is also stored here
+    Optionally some measurement specific parameters can be added and post processing can be applied
+    (archival .py.old variant using decorator-style properties)
+    '''
+    def __init__(self):
+        self._feature = None
+        self._resource = None
+        self._samplechain = None
+        self._formatter = None
+        self._parameters = ParameterList()
+
+    @property
+    def resource(self):
+        # (resourceid, resource) pair or None if unset
+        return self._resource
+    @resource.setter
+    # NOTE(review): tuple parameter unpacking is Python 2 only syntax
+    def resource(self, (resourceid, resource)):
+        if not isinstance(resource, coreresource):
+            raise Exception("%s is not a resource type" % resource)
+        self._resource = (resourceid, resource)
+
+    @property
+    def feature(self):
+        return self._feature
+    @feature.setter
+    def feature(self, feature):
+        self._feature = feature
+
+    @property
+    def samplechain(self):
+        return self._samplechain
+    @samplechain.setter
+    def samplechain(self, samplechain):
+        self._samplechain = samplechain
+
+    @property
+    def formatter(self):
+        return self._formatter
+    @formatter.setter
+    def formatter(self, uri_formatter):
+        # map the formatter URI to a formatter class by its URI suffix
+        if str(uri_formatter).endswith("Formatter_JSON"):
+            self._formatter = JsonFormatter
+        elif str(uri_formatter).endswith("Formatter_CSV"):
+            self._formatter = DumbFormatter
+        else:
+            raise Exception("%s is not a formatter type" % uri_formatter)
+
+    @property
+    def paramlist(self):
+        # read-only view of the parameters
+        return self._parameters
+
+    def addParameter(self, parameter):
+        # append a measurement specific parameter to the internal ParameterList
+        self._parameters.append(parameter)
+
+class QueryBundle(object):
+    '''
+    @summary: represents a collection of SingleQueries
+    (archival .py.old variant; identical logic to the current file)
+    '''
+    def __init__(self):
+        # reference -> SingleQuery
+        self.atoms = {}
+
+    def __len__(self):
+        return len(self.atoms)
+
+    def has_key(self, key):
+        # Python 2 style membership test over the stored references
+        return self.atoms.has_key(key)
+
+    def __iter__(self):
+        # iterate over the stored SingleQuery values (not the references)
+        for q in self.atoms.itervalues():
+            yield q
+
+    def getResource(self, resourceid):
+        # look up an already translated resource by id; None when absent
+        for q in self:
+            if q.resource[0] == resourceid:
+                return q.resource[1]
+        return None
+
+    def add(self, reference, q):
+        # register a SingleQuery under a unique reference; raises on
+        # duplicate reference or wrong value type
+        if self.atoms.has_key(reference):
+            raise Exception("Duplicate MonitoringQuery entry")
+        if not isinstance(q, SingleQuery):
+            raise Exception("Wrong type")
+        self.atoms[reference] = q
+
+
+
+
+
+#class BundleQueryBundle(QueryBundle):
+# def newQuery(self, key, feature, samplechain, resource, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = MonitorQuery()
+# Q.resource = resource
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
+
+# def addParameter(self, key, parameter):
+# if not self.atoms.has_key(key):
+# raise Exception("Atomic query %s does not exist" % key)
+# self.atoms[key].addParameter(parameter)
+
+
+#class AggregatorQuery(Query):
+# def __init__(self):
+# Query.__init__(self)
+# self._processid = None
+
+# @property
+# def processid(self):
+# return self._processid
+# @processid.setter
+# def processid(self, processid):
+# self._processid = processid
+
+
+
+#class SampleQuery(QueryBundle):
+# def newQuery(self, key, processid, feature, samplechain, formatter):
+# if self.atoms.has_key(key):
+# raise Exception("Atomic query %s exists" % key)
+# Q = AggregatorQuery()
+# Q.processid = processid
+# Q.feature = feature
+# Q.samplechain = samplechain
+# Q.formatter = formatter
+# self.atoms[key] = Q
diff --git a/Monitoring/src/main/python/Semantics/QueryInterpreter$py.class b/Monitoring/src/main/python/Semantics/QueryInterpreter$py.class
new file mode 100644
index 0000000..d001aa9
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/QueryInterpreter$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/QueryInterpreter.py b/Monitoring/src/main/python/Semantics/QueryInterpreter.py
new file mode 100644
index 0000000..1422554
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/QueryInterpreter.py
@@ -0,0 +1,272 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+
+from Semantics.Query import QueryBundle, SingleQuery
+from Resource.node import node
+from Resource.interface import interface
+from Resource.link import link
+from DataProcessing.Aggregator import Max, Min, Percentile, Mean, Deviation
+from DataProcessing.Sampler import Tail, Head
+from DataProcessing.Parameter import ParameterList
+
+
+class QueryInterpreter(object):
+    '''
+    @summary: translates SPARQL query results over the monitoring ontology
+    into QueryBundle/SingleQuery objects and resource descriptions
+    '''
+    # name of the sample individual that terminates a sample chain
+    samplesource = 'UnmodifiedExtractOfFeatureSamples'
+    # ontology individual name -> aggregator/sampler skeleton class
+    lut_skeleton = {
+        'Maximum': Max,
+        'Minimum': Min,
+        'Percentile': Percentile,
+        'Average': Mean,
+        'Variance': Deviation,
+        'Tail': Tail,
+        'Head': Head
+    }
+
+    def __init__(self, model):
+        '''
+        @summary: constructor
+        @param model: the task model to resolve the tools
+        @type model: TaskModel
+        '''
+        self.model = model
+
+    def getUnitOfDimension(self, ref_dim):
+        # default unit of a dimension, resolved via the model's DimensionManager
+        return self.model.dm[ref_dim].unit
+
+    def getUnit(self, uri_prefix, uri_unit):
+        # @summary: build a unit reference ("prefix_unit" or plain unit tail)
+        # NOTE(review): self.um is never assigned in this class; presumably
+        # this should be self.model.um -- confirm against TaskModel
+        if uri_prefix is None:
+            uref = self.model._tail(uri_unit)
+        else:
+            uref = "%s_%s" % (self.model._tail(uri_prefix), self.model._tail(uri_unit))
+        return self.um[uref]
+
+    def _get_myns(self):
+        # namespace prefix map of the ontology graph, used as initNs below
+        return dict(self.model.ontology.graph.namespaces())
+
+
+    myns = property(_get_myns,None,None)
+
+
+    def inferInterfacesOf(self, qgraph, uri_node):
+        # @summary: yield bidirectional interfaces of a node that expose an
+        # IPv4 address (generator of interface objects)
+        q = """
+SELECT ?ifin ?address ?unit ?prefix
+WHERE {
+ <%s> core:hasInboundInterface ?ifin ;
+ core:hasOutboundInterface ?ifout .
+ ?ifin a core:Interface ;
+ core:hasIPv4Address ?addressobj .
+ ?ifout a core:Interface ;
+ core:hasIPv4Address ?addressobj .
+ ?addressobj a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:IPAddress ;
+ unit:hasValue ?address .
+ OPTIONAL {
+ ?addressobj unit:hasUnit ?unit .
+ }
+ OPTIONAL {
+ ?addressobj unit:hasPrefix ?prefix .
+ }
+}
+ """ % uri_node
+        for uri_ifin, address, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            name = self.model.ontology._tail(uri_ifin)
+            iface = interface(name, resourceid = uri_ifin)
+            if uri_unit is not None:
+                iface.address = str(address), self.getUnit(uri_prefix, uri_unit)
+            else:
+                iface.address = str(address), self.getUnitOfDimension('IPAddress')
+            iface.direction = iface.EGRESS | iface.INGRESS
+            #FIXME: this info should come from the model
+            iface.interface = "eth0"
+            iface.ispublic = True
+            yield iface
+        #TODO: similarly look up uni directional interfaces of the node and yield them as well
+
+    def inferInterfacesOfLink(self, qgraph, uri_node, iftype):
+        # @summary: yield the interface at one end of a link;
+        # iftype is "hasSource" (egress end) or "hasSink" (ingress end)
+        q = """
+SELECT ?ifin ?address ?unit ?prefix
+WHERE {
+ <%s> core:%s ?ifin .
+ ?ifin a core:Interface ;
+ core:hasIPv4Address ?addressobj .
+ ?addressobj a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:IPAddress ;
+ unit:hasValue ?address .
+ OPTIONAL {
+ ?addressobj unit:hasUnit ?unit .
+ }
+ OPTIONAL {
+ ?addressobj unit:hasPrefix ?prefix .
+ }
+}
+ """ % (uri_node, iftype)
+        for uri_ifin, address, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            name = self.model.ontology._tail(uri_ifin)
+            iface = interface(name, resourceid = uri_ifin)
+            if uri_unit is not None:
+                iface.address = str(address), self.getUnit(uri_prefix, uri_unit)
+            else:
+                iface.address = str(address), self.getUnitOfDimension('IPAddress')
+            if iftype=="hasSource": iface.direction = iface.EGRESS
+            else: iface.direction = iface.INGRESS
+            #FIXME: this info should come from the model
+            iface.interface = "eth0"
+            iface.ispublic = True
+            yield iface
+        #TODO: similarly look up uni directional interfaces of the node and yield them as well
+
+
+
+
+    def inferBundleQueries(self, qgraph):
+        '''
+        @summary: extract all BundleQuery individuals from the request graph
+        and build a QueryBundle of SingleQuery objects (feature, resource,
+        formatter, sample chain and parameters resolved)
+        @param qgraph: the query graph to interpret
+        @return: the assembled bundle
+        @rtype: QueryBundle
+        '''
+        q = """
+SELECT ?query ?feature ?sample ?resource ?formatter
+WHERE {
+ ?query a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a query:BundleQuery ;
+ feature:hasFeature ?feature ;
+ stat:hasSample ?sample ;
+ query:hasResource ?resource ;
+ query:hasFormatter ?formatter .
+}
+ """
+        Q = QueryBundle()
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            # reuse a resource already translated for an earlier query row
+            r = Q.getResource(uri_resource)
+            if r is None:
+                r = self.translateResource(qgraph, uri_resource)
+            sq = SingleQuery()
+            sq.feature = uri_feature
+            sq.resource = (uri_resource, r)
+            sq.formatter = uri_formatter
+            sq.samplechain = self.inferSampleChain(qgraph, uri_sample)
+            for p in self.inferParameters(qgraph, uri_query):
+                sq.addParameter(parameter = p)
+            Q.add(uri_query, sq)
+        return Q
+
+    def getSampleManipulationQuery(self, qgraph):
+        '''
+        @summary: extract SampleManipulationQuery individuals
+        NOTE(review): appears broken/dead -- SampleQuery is not defined in
+        this module (only present in commented-out code below), and the loop
+        unpacks ?resource which the SELECT clause does not bind; confirm
+        before use
+        '''
+        q = """
+SELECT ?query ?feature ?sample ?formatter
+WHERE {
+ ?query a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a query:SampleManipulationQuery ;
+ feature:hasFeature ?feature ;
+ stat:hasSample ?sample ;
+ query:hasFormatter ?formatter .
+}
+ """
+        Q = SampleQuery()
+        resources = {}
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            resource_name = self.model.ontology._tail(uri_resource)
+            if not resources.has_key(resource_name):
+                resources[resource_name] = self.translateResource(qgraph, uri_resource)
+            if not Q.has_key(uri_query):
+                samplechain = self.inferSampleChain(qgraph, uri_sample)
+                Q.newQuery(key = uri_query, feature = uri_feature, samplechain = samplechain, resource = resources[resource_name], formatter = uri_formatter)
+            for p in self.inferParameters(qgraph, uri_query):
+                Q.addParameter(key = uri_query, parameter = p)
+        return Q
+
+
+    def inferSampleChain(self, qgraph, uri_sample):
+        # @summary: recursively walk stat:hasSample links down to the
+        # samplesource individual, collecting (skeleton, parameters) pairs;
+        # the chain is returned innermost-first
+        tail = self.model.ontology._tail(uri_sample)
+        if tail == self.samplesource:
+            return []
+        q = """
+SELECT ?nextsample ?sampleop
+WHERE {
+ <%s> a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ stat:hasSample ?nextsample ;
+ a ?sampleop
+}
+ """ % uri_sample
+        # NOTE(review): if the query yields no usable row, tail/op keep
+        # stale/unbound values and op.append raises NameError -- confirm
+        # the ontology guarantees at least one match
+        for uri_sample_next, uri_sampleop in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_sampleop)
+            if tail in [ 'NamedIndividual' ]:
+                continue
+            op = self.inferSampleChain(qgraph, uri_sample_next)
+            break
+        skeleton = self.lut_skeleton[tail]
+        parlist = ParameterList([ p for p in self.inferParameters(qgraph, uri_sample) ])
+        op.append( (skeleton, parlist) )
+        return op
+
+    def inferParameters(self, qgraph, uri_query):
+        # @summary: generator of translated parameters attached to a query or
+        # sample operator individual; dimension pseudo-types are filtered out
+        q = """
+SELECT ?name ?type ?dim ?defval ?unit ?prefix
+WHERE {
+ <%s> param:hasParameter ?par .
+ ?par a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ param:paramName ?name ;
+ param:hasType ?type ;
+ a ?dim .
+ OPTIONAL {
+ ?par unit:hasValue ?defval .
+ OPTIONAL {
+ ?par unit:hasUnit ?unit .
+ }
+ OPTIONAL {
+ ?par unit:hasPrefix ?prefix .
+ }
+ }
+}
+ """ % uri_query
+        for uri_name, uri_type, uri_dim, uri_default, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_dim)
+#FIXME: query should include the filter, but rdflib has a bug and only the spelt out form would work
+# FILTER ( ?dim != <http://www.w3.org/2002/07/owl#NamedIndividual> )
+# FILTER ( ?dim != query:QueryParameter )
+# FILTER ( ?dim != stat:SampleOperatorParameter )
+#
+# like:
+# FILTER ( ?dim != <http://www.w3.org/2002/07/owl#NamedIndividual> )
+            if tail in [ 'QueryParameter', 'SOP_tail', 'SOP_head', 'SOP_order', 'NamedIndividual' ]:
+                continue
+            yield self.model.translateParameter(str(uri_name), uri_dim, uri_unit, uri_prefix, uri_type, uri_default)
+
+    def translateResource(self, qgraph, uri_resource):
+        # @summary: build a node or link object for a core:Resource
+        # individual; unhandled resource types are only warned about, so the
+        # method implicitly returns None for them
+        resource_name = self.model.ontology._tail(uri_resource)
+        q = """
+SELECT ?resourcetype
+WHERE {
+ <%s> a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a core:Resource ;
+ a ?resourcetype ;
+}
+ """ % uri_resource
+        for uri_rtype, in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_rtype)
+            if tail in [ 'Resource', 'NamedIndividual', 'NetworkElement' ]:
+                continue
+            if tail == "Node":
+                r = node(name = resource_name, resourceid = uri_resource)
+                for iface in self.inferInterfacesOf(qgraph, uri_resource):
+                    r.addinterface(iface)
+                return r
+            elif tail == "Link":
+                r = link(name = resource_name, resourceid = uri_resource)
+                # a link carries exactly one source and one sink interface
+                for iface in self.inferInterfacesOfLink(qgraph, uri_resource, "hasSource"):
+                    r.source = iface
+                    break
+                for iface in self.inferInterfacesOfLink(qgraph, uri_resource, "hasSink"):
+                    r.destination = iface
+                    break
+                return r
+            else:
+                print "WW: unhandled rtype", uri_rtype
+                continue
+
diff --git a/Monitoring/src/main/python/Semantics/QueryInterpreter.py.old b/Monitoring/src/main/python/Semantics/QueryInterpreter.py.old
new file mode 100644
index 0000000..c6f5574
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/QueryInterpreter.py.old
@@ -0,0 +1,223 @@
+'''
+Created on Feb 21, 2012
+
+@author: steger
+'''
+
+from Semantics.Query import QueryBundle, SingleQuery
+from Resource.node import node
+from Resource.interface import interface
+from DataProcessing.Aggregator import Max, Min, Percentile, Mean, Deviation
+from DataProcessing.Sampler import Tail, Head
+from DataProcessing.Parameter import ParameterList
+
+
+class QueryInterpreter(object):
+    '''
+    @summary: archival (.py.old) variant; translates SPARQL query results
+    over the monitoring ontology into QueryBundle/SingleQuery objects.
+    Handles Node resources only (no Link support in this version).
+    '''
+    # name of the sample individual that terminates a sample chain
+    samplesource = 'UnmodifiedExtractOfFeatureSamples'
+    # ontology individual name -> aggregator/sampler skeleton class
+    lut_skeleton = {
+        'Maximum': Max,
+        'Minimum': Min,
+        'Percentile': Percentile,
+        'Average': Mean,
+        'Variance': Deviation,
+        'Tail': Tail,
+        'Head': Head
+    }
+
+    def __init__(self, model):
+        '''
+        @summary: constructor
+        @param model: the task model to resolve the tools
+        @type model: TaskModel
+        '''
+        self.model = model
+
+    def getUnitOfDimension(self, ref_dim):
+        # default unit of a dimension, resolved via the model's DimensionManager
+        return self.model.dm[ref_dim].unit
+
+    def getUnit(self, uri_prefix, uri_unit):
+        # NOTE(review): self.um is never assigned in this class; presumably
+        # this should be self.model.um -- confirm against TaskModel
+        if uri_prefix is None:
+            uref = self.model._tail(uri_unit)
+        else:
+            uref = "%s_%s" % (self.model._tail(uri_prefix), self.model._tail(uri_unit))
+        return self.um[uref]
+
+    @property
+    def myns(self):
+        # namespace prefix map of the ontology graph, used as initNs below
+        return dict(self.model.ontology.graph.namespaces())
+
+    def inferInterfacesOf(self, qgraph, uri_node):
+        # @summary: yield bidirectional interfaces of a node that expose an
+        # IPv4 address (generator of interface objects)
+        q = """
+SELECT ?ifin ?address ?unit ?prefix
+WHERE {
+ <%s> core:hasInboundInterface ?ifin ;
+ core:hasOutboundInterface ?ifout .
+ ?ifin a core:Interface ;
+ core:hasIPv4Address ?addressobj .
+ ?ifout a core:Interface ;
+ core:hasIPv4Address ?addressobj .
+ ?addressobj a owl:NamedIndividual ;
+ a unit:IPAddress ;
+ unit:hasValue ?address .
+ OPTIONAL {
+ ?addressobj unit:hasUnit ?unit .
+ }
+ OPTIONAL {
+ ?addressobj unit:hasPrefix ?prefix .
+ }
+}
+ """ % uri_node
+        for uri_ifin, address, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            name = self.model.ontology._tail(uri_ifin)
+            iface = interface(name, resourceid = uri_ifin)
+            if uri_unit is not None:
+                iface.address = str(address), self.getUnit(uri_prefix, uri_unit)
+            else:
+                iface.address = str(address), self.getUnitOfDimension('IPAddress')
+            iface.direction = iface.EGRESS | iface.INGRESS
+            #FIXME: this info should come from the model
+            iface.interface = "eth0"
+            iface.ispublic = True
+            yield iface
+        #TODO: similarly look up uni directional interfaces of the node and yield them as well
+
+    def inferBundleQueries(self, qgraph):
+        '''
+        @summary: extract all BundleQuery individuals from the request graph
+        and build a QueryBundle of SingleQuery objects
+        @return: the assembled bundle
+        @rtype: QueryBundle
+        '''
+        q = """
+SELECT ?query ?feature ?sample ?resource ?formatter
+WHERE {
+ ?query a owl:NamedIndividual ;
+ a query:BundleQuery ;
+ feature:hasFeature ?feature ;
+ stat:hasSample ?sample ;
+ query:hasResource ?resource ;
+ query:hasFormatter ?formatter .
+}
+ """
+        Q = QueryBundle()
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            # reuse a resource already translated for an earlier query row
+            r = Q.getResource(uri_resource)
+            if r is None:
+                r = self.translateResource(qgraph, uri_resource)
+            sq = SingleQuery()
+            sq.feature = uri_feature
+            sq.resource = (uri_resource, r)
+            sq.formatter = uri_formatter
+            sq.samplechain = self.inferSampleChain(qgraph, uri_sample)
+            for p in self.inferParameters(qgraph, uri_query):
+                sq.addParameter(parameter = p)
+            Q.add(uri_query, sq)
+        return Q
+
+    def getSampleManipulationQuery(self, qgraph):
+        '''
+        @summary: extract SampleManipulationQuery individuals
+        NOTE(review): appears broken/dead -- SampleQuery is not defined in
+        this module, and the loop unpacks ?resource which the SELECT clause
+        does not bind; confirm before use
+        '''
+        q = """
+SELECT ?query ?feature ?sample ?formatter
+WHERE {
+ ?query a owl:NamedIndividual ;
+ a query:SampleManipulationQuery ;
+ feature:hasFeature ?feature ;
+ stat:hasSample ?sample ;
+ query:hasFormatter ?formatter .
+}
+ """
+        Q = SampleQuery()
+        resources = {}
+        for uri_query, uri_feature, uri_sample, uri_resource, uri_formatter in qgraph.query(q, initNs = self.myns):
+            resource_name = self.model.ontology._tail(uri_resource)
+            if not resources.has_key(resource_name):
+                resources[resource_name] = self.translateResource(qgraph, uri_resource)
+            if not Q.has_key(uri_query):
+                samplechain = self.inferSampleChain(qgraph, uri_sample)
+                Q.newQuery(key = uri_query, feature = uri_feature, samplechain = samplechain, resource = resources[resource_name], formatter = uri_formatter)
+            for p in self.inferParameters(qgraph, uri_query):
+                Q.addParameter(key = uri_query, parameter = p)
+        return Q
+
+
+    def inferSampleChain(self, qgraph, uri_sample):
+        # @summary: recursively walk stat:hasSample links down to the
+        # samplesource individual, collecting (skeleton, parameters) pairs
+        tail = self.model.ontology._tail(uri_sample)
+        if tail == self.samplesource:
+            return []
+        q = """
+SELECT ?nextsample ?sampleop
+WHERE {
+ <%s> a owl:NamedIndividual ;
+ stat:hasSample ?nextsample ;
+ a ?sampleop
+}
+ """ % uri_sample
+        # NOTE(review): op is unbound (NameError) if the query yields no
+        # usable row -- confirm the ontology guarantees a match
+        for uri_sample_next, uri_sampleop in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_sampleop)
+            if tail in [ 'NamedIndividual' ]:
+                continue
+            op = self.inferSampleChain(qgraph, uri_sample_next)
+            break
+        skeleton = self.lut_skeleton[tail]
+        parlist = ParameterList([ p for p in self.inferParameters(qgraph, uri_sample) ])
+        op.append( (skeleton, parlist) )
+        return op
+
+    def inferParameters(self, qgraph, uri_query):
+        # @summary: generator of translated parameters attached to a query or
+        # sample operator individual; dimension pseudo-types are filtered out
+        q = """
+SELECT ?name ?type ?dim ?defval ?unit ?prefix
+WHERE {
+ <%s> param:hasParameter ?par .
+ ?par a owl:NamedIndividual ;
+ param:paramName ?name ;
+ param:hasType ?type ;
+ a ?dim .
+ OPTIONAL {
+ ?par unit:hasValue ?defval .
+ OPTIONAL {
+ ?par unit:hasUnit ?unit .
+ }
+ OPTIONAL {
+ ?par unit:hasPrefix ?prefix .
+ }
+ }
+}
+ """ % uri_query
+        for uri_name, uri_type, uri_dim, uri_default, uri_unit, uri_prefix in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_dim)
+#FIXME: query should include the filter, but rdflib has a bug and only the spelt out form would work
+# FILTER ( ?dim != owl:NamedIndividual )
+# FILTER ( ?dim != query:QueryParameter )
+# FILTER ( ?dim != stat:SampleOperatorParameter )
+#
+# like:
+# FILTER ( ?dim != <http://www.w3.org/2002/07/owl#NamedIndividual> )
+            if tail in [ 'QueryParameter', 'SOP_tail', 'SOP_head', 'SOP_order', 'NamedIndividual' ]:
+                continue
+            yield self.model.translateParameter(str(uri_name), uri_dim, uri_unit, uri_prefix, uri_type, uri_default)
+
+    def translateResource(self, qgraph, uri_resource):
+        # @summary: build a node object for a core:Resource individual;
+        # unhandled resource types are only warned about, so the method
+        # implicitly returns None for them
+        resource_name = self.model.ontology._tail(uri_resource)
+        q = """
+SELECT ?resourcetype
+WHERE {
+ <%s> a owl:NamedIndividual ;
+ a core:Resource ;
+ a ?resourcetype ;
+}
+ """ % uri_resource
+        for uri_rtype, in qgraph.query(q, initNs = self.myns):
+            tail = self.model.ontology._tail(uri_rtype)
+            if tail in [ 'Resource', 'NamedIndividual' ]:
+                continue
+            if tail == "Node":
+                r = node(name = resource_name, resourceid = uri_resource)
+                for iface in self.inferInterfacesOf(qgraph, uri_resource):
+                    r.addinterface(iface)
+                return r
+            else:
+                print "WW: unhandled rtype", uri_rtype
+                continue
+ \ No newline at end of file
diff --git a/Monitoring/src/main/python/Semantics/TaskModel$py.class b/Monitoring/src/main/python/Semantics/TaskModel$py.class
new file mode 100644
index 0000000..e154c71
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/TaskModel$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/TaskModel.py b/Monitoring/src/main/python/Semantics/TaskModel.py
new file mode 100644
index 0000000..5f77aec
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/TaskModel.py
@@ -0,0 +1,436 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from DataProcessing.Data import DataHeader, DataHeaderCell
+from DataProcessing.Parameter import ParameterList, Parameter
+from Driver.SOAPClient import SOAPClient
+from Driver.SshExec import SshExec
+from Driver.REST import RESTDriver
+import Driver.REST
+
+class TaskModelError(Exception):
+ pass
+
+class TaskModel(object):
+ '''
+ classdocs
+ '''
+ hooklookup = {
+ 'hasPreHook' : 'prehook',
+ 'hasStartHook' : 'starthook',
+ 'hasRetrieveHook' : 'retrievehook',
+ 'hasStopHook' : 'stophook',
+ 'hasPostHook' : 'posthook',
+ }
+ typelookup = {
+ 'Integer': int,
+ 'Float': float,
+ 'String': str
+ }
+
+ usingREST = False
+
+ def __init__(self, dimensionmanager, unitmanager, ontology):
+ '''
+ @summary: constructor
+ @param dimensionmanager: the container to form a cell's dimension
+ @type dimensionmanager: DimensionManager
+ @param unitmanager: the container to form a cell's unit
+ @type unitmanager: UnitManager
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+ self.dm = dimensionmanager
+ self.um = unitmanager
+
+ def inferDomains(self):
+ '''
+ @summary: extract the monitoring domains from the information model
+ @return: generator of the list of domains
+ @rtype: URIRef
+ '''
+ for uri_domain, _, _ in self.ontology.graph.triples((None, self.ontology.ns('rdf')['type'], self.ontology.ns('task')['MonitoringDomain'])):
+ yield uri_domain
+
+ def inferFeatures(self):
+ '''
+ @summary: extract the monitored features from the information model
+ @return: a generator of the list of (feature reference, name) pairs
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?feature ?name ?resource
+WHERE {
+ ?feature a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a ?parent ;
+ feature:featureName ?name .
+ ?parent rdfs:subClassOf feature:MonitoredFeature .
+ ?resource feature:hasFeature ?feature
+}
+ """
+ for uri_feature, name, uri_resource in self.ontology.query(q):
+ yield uri_feature, str(name), uri_resource
+
+ def inferTasks(self, domain, feature):
+ '''
+ @summary: provides a generator to crawl over the tasks that can measure a given feature in the given domain of interest
+ @param domain: domain of interest
+ @type domain: URIRef
+ @param feature: the feature to measure
+ @type feature: URIRef
+ @return: a generator of the list of (task reference, task name) pairs
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?task ?name
+WHERE {
+ ?task a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a task:MonitoringTask ;
+ task:name ?name ;
+ task:hasMonitoringDomain task:%s ;
+ task:hasOutputTableFormat ?data .
+?data task:hasColumn ?col .
+?col task:hasMonitoredFeature feature:%s
+}
+ """ % (self.ontology._tail(domain), self.ontology._tail(feature))
+ for uri_task, tname in self.ontology.query(q):
+ yield uri_task, str(tname)
+
+ def inferCredentialOf(self, task):
+ '''
+ @summary: extracts the set of acceptable credential templates the given task accepts
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a set of an uninitialized Credential classes
+ @rtype: set(Credential)
+ @raise IMError: Unknown authentication type
+ '''
+ creds = set()
+ for (_, _, auth) in self.ontology.graph.triples((task, self.ontology.ns('task')['hasAuthenticationType'], None)):
+ if auth == self.ontology.ns('task')["UsernamePassword"]:
+ creds.add(UsernamePassword)
+ elif auth == self.ontology.ns('task')["UsernameRSAKey"]:
+ creds.add(UsernameRSAKey)
+ else:
+ raise TaskModelError("Unknown authentication type %s" % auth)
+ return creds
+
+ def inferDriverOf(self, task):
+ '''
+ @summary: extracts the driver of the task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: the appropriate driver class uninstantiated
+ @rtype: Driver
+ @raise IMError: Unknown driver type / hasDriver missing
+ '''
+ try:
+ _, _, driver = self.ontology.graph.triples((task, self.ontology.ns('task')['hasDriver'], None)).next()
+ if driver == self.ontology.ns('task')["SOAPClient"]:
+ return SOAPClient
+ elif driver == self.ontology.ns('task')["SSH"]:
+ return SshExec
+ elif driver == self.ontology.ns('task')["REST"]:
+ if not self.usingREST:
+ self.usingREST = True
+ try:
+ Driver.REST.setSSLTrusted()
+ except:
+ raise TaskModelError("REST SSL Error")
+ return RESTDriver
+ else:
+ raise TaskModelError("Unknown driver type %s" % driver)
+ except StopIteration:
+ raise TaskModelError("hasDriver is missing for task %s" % task)
+
+ def inferHookparametersOf(self, task):
+ '''
+ @summary: extract the necessary control parameters for task initialization
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a lookup table of arguments, which are passed to the Task object's prehook method as keyword arguments
+ @rtype: dict
+ '''
+ q = """
+SELECT ?name ?value ?type
+WHERE {
+ conf:%s task:hasHookParameter ?p .
+ ?p param:paramName ?name ;
+ a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ rdf:type task:HookParameter ;
+ unit:hasValue ?value ;
+ param:hasType ?type .
+}
+ """ % (self.ontology._tail(task))
+ d = {}
+ for pname, pvalue, ptype in self.ontology.query(q):
+ pname = str(pname)
+ if ptype == self.ontology.ns('param')["Integer"]:
+ d[pname] = int(pvalue)
+ elif ptype == self.ontology.ns('param')["Float"]:
+ d[pname] = float(pvalue)
+ else:
+ d[pname] = str(pvalue)
+ return d
+
+ def inferHookdefinitionsOf(self, task):
+ '''
+ @summary: extract the hook implementation details for task initialization
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a lookup table of hook definitions
+ @rtype: dict
+ '''
+ q = """
+SELECT ?rel ?value
+WHERE {
+ conf:%s ?rel ?h .
+ ?h task:hookCode ?value .
+}
+ """ % (self.ontology._tail(task))
+ d = {}
+ for hrel, hvalue in self.ontology.query(q):
+ hook = self.ontology._tail(uriref = hrel)
+ d[self.hooklookup[hook]] = str(hvalue).replace('\\n', '\n').replace('\\t', '\t').replace('\\\\', '\\').strip()
+ return d
+
+ def inferDataheaderOf(self, task):
+ '''
+ @summary: extract the data header declaration for the task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: an initialized DataHeader instance
+ @rtype: DataHeader
+ '''
+ q = """
+SELECT ?tablename ?colname ?dim ?feature ?unit ?prefix
+WHERE {
+ conf:%s task:hasOutputTableFormat ?hdr .
+ ?hdr task:name ?tablename .
+ ?hdr task:hasColumn ?col .
+ ?col task:name ?colname ;
+ a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a ?dim ;
+ task:sequenceNumber ?seqno .
+ {
+ ?dim rdfs:subClassOf unit:BaseDimension .
+ } UNION {
+ ?dim rdfs:subClassOf ?p .
+ ?p rdfs:subClassOf unit:DerivedDimension .
+ }
+ OPTIONAL {
+ ?col task:hasMonitoredFeature ?feature .
+ }
+ OPTIONAL {
+ ?col unit:hasUnit ?unit .
+ OPTIONAL {
+ ?col unit:hasPrefix ?prefix .
+ }
+ }
+}
+ORDER BY ?seqno
+ """ % (self.ontology._tail(task))
+ datahdr = None
+ for tablename, colname, uri_dim, uri_feature, uri_unit, uri_prefix in self.ontology.query(q):
+ if datahdr is None:
+ datahdr = DataHeader(str(tablename))
+ if uri_unit is None:
+ u = None
+ elif uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if uri_feature is None:
+ cell = DataHeaderCell(name = str(colname), dimension = d, unit = u)
+ else:
+ cell = DataHeaderCell(name = str(colname), dimension = d, feature = uri_feature, unit = u)
+ datahdr.addColumn(cell)
+ return datahdr
+
+ def inferParametersOf(self, task):
+ '''
+ @summary: extract the parameter list for the given task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: an initialized list of the parameters of the task
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ conf:%s task:hasExecutionParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ a ?dim .
+ {
+ ?dim rdfs:subClassOf unit:BaseDimension .
+ } UNION {
+ ?dim rdfs:subClassOf ?p .
+ ?p rdfs:subClassOf unit:DerivedDimension .
+ }
+ OPTIONAL {
+ ?par unit:hasValue ?defval .
+ OPTIONAL {
+ ?par unit:hasUnit ?unit .
+ OPTIONAL {
+ ?par unit:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(task))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferObligatoryParametersOf(self, feature):
+ '''
+ @summary: extract the parameter list for a given feature
+ @param feature: reference to the monitored feature
+ @type feature: URIRef
+ @return: an initialized list of the parameters for this feature
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ feature:%s feature:obligatoryParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(feature))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferOptionalParametersOf(self, feature):
+ '''
+ @summary: extract the parameter list for a given feature
+ @param feature: reference to the monitored feature
+ @type feature: URIRef
+ @return: an initialized list of the parameters for this feature
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ feature:%s feature:optionalParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(feature))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferFeatureMonitoringParameters(self):
+ '''
+ @summary: extract parameters declared for feature monitoring
+ @return: an iterator over parameters
+ @rtype: (parameter name, dimension, value, unit)
+ '''
+ q = """
+SELECT ?name ?dim ?defval ?unit ?prefix
+WHERE {
+ ?par a feature:FeatureMonitoringParameter ;
+ param:paramName ?name ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """
+ for name, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+#FIXME: duplicate (similar thing in translateParameter!!!)
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if default is None:
+ yield str(name), d, "", d.unit
+ else:
+ if uri_unit is None:
+ if uri_prefix is None:
+ u = d.unit
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+ u = self.um[ref]
+ else:
+ if uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ yield str(name), d, str(default), u
+
+ def translateParameter(self, name, uri_dim, uri_unit, uri_prefix, uri_ptype, default = None):
+ '''
+ @summary: helper method to instantiate a Parameter
+ @param name: the reference name of the parameter
+ @type name: str
+ @param uri_dim: the dimension of the parameter
+ @type uri_dim: URIRef
+ @param uri_unit: the unit of the parameter, if None we fall back to the unit of the dimension
+ @type uri_unit: URIRef
+ @param uri_prefix: accounts only if uri_unit is not None
+ @type uri_prefix: URIRef
+ @param uri_ptype: the type of the parameter to use for serialization
+ @type uri_ptype: URIRef
+ @param default: the parameter value to initialize with, if None, parameter won't hold a value
+ @type default: Literal
+ @return: a parameter
+ @rtype: Parameter
+ '''
+ vt = self.typelookup[ self.ontology._tail(uri_ptype) ]
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if default is None:
+ return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d)
+ else:
+ if uri_unit is None:
+ if uri_prefix is None:
+ u = d.unit
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+ u = self.um[ref]
+ else:
+ if uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d, default = (vt(default), u))
diff --git a/Monitoring/src/main/python/Semantics/TaskModel.py.old b/Monitoring/src/main/python/Semantics/TaskModel.py.old
new file mode 100644
index 0000000..2cfc06e
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/TaskModel.py.old
@@ -0,0 +1,424 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+from Credential.credentialtypes import UsernamePassword, UsernameRSAKey
+from DataProcessing.Data import DataHeader, DataHeaderCell
+from DataProcessing.Parameter import ParameterList, Parameter
+from Driver.SOAPClient import SOAPClient
+from Driver.SshExec import SshExec
+
+class TaskModelError(Exception):
+ pass
+
+class TaskModel(object):
+ '''
+ classdocs
+ '''
+ hooklookup = {
+ 'hasPreHook' : 'prehook',
+ 'hasStartHook' : 'starthook',
+ 'hasRetrieveHook' : 'retrievehook',
+ 'hasStopHook' : 'stophook',
+ 'hasPostHook' : 'posthook',
+ }
+ typelookup = {
+ 'Integer': int,
+ 'Float': float,
+ 'String': str
+ }
+
+ def __init__(self, dimensionmanager, unitmanager, ontology):
+ '''
+ @summary: constructor
+ @param dimensionmanager: the container to form a cell's dimension
+ @type dimensionmanager: DimensionManager
+ @param unitmanager: the container to form a cell's unit
+ @type unitmanager: UnitManager
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+ self.dm = dimensionmanager
+ self.um = unitmanager
+
+ def inferDomains(self):
+ '''
+ @summary: extract the monitoring domains from the information model
+ @return: generator of the list of domains
+ @rtype: URIRef
+ '''
+ for uri_domain, _, _ in self.ontology.graph.triples((None, self.ontology.ns('rdf')['type'], self.ontology.ns('task')['MonitoringDomain'])):
+ yield uri_domain
+
+ def inferFeatures(self):
+ '''
+ @summary: extract the monitored features from the information model
+ @return: a generator of the list of (feature reference, name) pairs
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?feature ?name ?resource
+WHERE {
+ ?feature a owl:NamedIndividual ;
+ a ?parent ;
+ feature:featureName ?name .
+ ?parent rdfs:subClassOf feature:MonitoredFeature .
+ ?resource feature:hasFeature ?feature
+}
+ """
+ for uri_feature, name, uri_resource in self.ontology.query(q):
+ yield uri_feature, str(name), uri_resource
+
+ def inferTasks(self, domain, feature):
+ '''
+ @summary: provides a generator to crawl over the tasks that can measure a given feature in the given domain of interest
+ @param domain: domain of interest
+ @type domain: URIRef
+ @param feature: the feature to measure
+ @type feature: URIRef
+ @return: a generator of the list of (task reference, task name) pairs
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?task ?name
+WHERE {
+ ?task a owl:NamedIndividual ;
+ a task:MonitoringTask ;
+ task:name ?name ;
+ task:hasMonitoringDomain task:%s ;
+ task:hasOutputTableFormat ?data .
+?data task:hasColumn ?col .
+?col task:hasMonitoredFeature feature:%s
+}
+ """ % (self.ontology._tail(domain), self.ontology._tail(feature))
+ for uri_task, tname in self.ontology.query(q):
+ yield uri_task, str(tname)
+
+ def inferCredentialOf(self, task):
+ '''
+ @summary: extracts the set of acceptable credential templates the given task accepts
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a set of an uninitialized Credential classes
+ @rtype: set(Credential)
+ @raise IMError: Unknown authentication type
+ '''
+ creds = set()
+ for (_, _, auth) in self.ontology.graph.triples((task, self.ontology.ns('task')['hasAuthenticationType'], None)):
+ if auth == self.ontology.ns('task')["UsernamePassword"]:
+ creds.add(UsernamePassword)
+ elif auth == self.ontology.ns('task')["UsernameRSAKey"]:
+ creds.add(UsernameRSAKey)
+ else:
+ raise TaskModelError("Unknown authentication type %s" % auth)
+ return creds
+
+ def inferDriverOf(self, task):
+ '''
+ @summary: extracts the driver of the task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: the appropriate driver class uninstantiated
+ @rtype: Driver
+ @raise IMError: Unknown driver type / hasDriver missing
+ '''
+ try:
+ _, _, driver = self.ontology.graph.triples((task, self.ontology.ns('task')['hasDriver'], None)).next()
+ if driver == self.ontology.ns('task')["SOAPClient"]:
+ return SOAPClient
+ elif driver == self.ontology.ns('task')["SSH"]:
+ return SshExec
+ else:
+ raise TaskModelError("Unknown driver type %s" % driver)
+ except StopIteration:
+ raise TaskModelError("hasDriver is missing for task %s" % task)
+
+ def inferHookparametersOf(self, task):
+ '''
+ @summary: extract the necessary control parameters for task initialization
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a lookup table of arguments, which are passed to the Task object's prehook method as keyword arguments
+ @rtype: dict
+ '''
+ q = """
+SELECT ?name ?value ?type
+WHERE {
+ conf:%s task:hasHookParameter ?p .
+ ?p param:paramName ?name ;
+ a owl:NamedIndividual ;
+ rdf:type task:HookParameter ;
+ unit:hasValue ?value ;
+ param:hasType ?type .
+}
+ """ % (self.ontology._tail(task))
+ d = {}
+ for pname, pvalue, ptype in self.ontology.query(q):
+ pname = str(pname)
+ if ptype == self.ontology.ns('param')["Integer"]:
+ d[pname] = int(pvalue)
+ elif ptype == self.ontology.ns('param')["Float"]:
+ d[pname] = float(pvalue)
+ else:
+ d[pname] = str(pvalue)
+ return d
+
+ def inferHookdefinitionsOf(self, task):
+ '''
+ @summary: extract the hook implementation details for task initialization
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: a lookup table of hook definitions
+ @rtype: dict
+ '''
+ q = """
+SELECT ?rel ?value
+WHERE {
+ conf:%s ?rel ?h .
+ ?h task:hookCode ?value .
+}
+ """ % (self.ontology._tail(task))
+ d = {}
+ for hrel, hvalue in self.ontology.query(q):
+ hook = self.ontology._tail(uriref = hrel)
+ d[self.hooklookup[hook]] = str(hvalue).replace('\\n', '\n').replace('\\t', '\t').replace('\\\\', '\\').strip()
+ return d
+
+ def inferDataheaderOf(self, task):
+ '''
+ @summary: extract the data header declaration for the task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: an initialized DataHeader instance
+ @rtype: DataHeader
+ '''
+ q = """
+SELECT ?tablename ?colname ?dim ?feature ?unit ?prefix
+WHERE {
+ conf:%s task:hasOutputTableFormat ?hdr .
+ ?hdr task:name ?tablename .
+ ?hdr task:hasColumn ?col .
+ ?col task:name ?colname ;
+ a owl:NamedIndividual ;
+ a ?dim ;
+ task:sequenceNumber ?seqno .
+ {
+ ?dim rdfs:subClassOf unit:BaseDimension .
+ } UNION {
+ ?dim rdfs:subClassOf ?p .
+ ?p rdfs:subClassOf unit:DerivedDimension .
+ }
+ OPTIONAL {
+ ?col task:hasMonitoredFeature ?feature .
+ }
+ OPTIONAL {
+ ?col unit:hasUnit ?unit .
+ OPTIONAL {
+ ?col unit:hasPrefix ?prefix .
+ }
+ }
+}
+ORDER BY ?seqno
+ """ % (self.ontology._tail(task))
+ datahdr = None
+ for tablename, colname, uri_dim, uri_feature, uri_unit, uri_prefix in self.ontology.query(q):
+ if datahdr is None:
+ datahdr = DataHeader(str(tablename))
+ if uri_unit is None:
+ u = None
+ elif uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if uri_feature is None:
+ cell = DataHeaderCell(name = str(colname), dimension = d, unit = u)
+ else:
+ cell = DataHeaderCell(name = str(colname), dimension = d, feature = uri_feature, unit = u)
+ datahdr.addColumn(cell)
+ return datahdr
+
+ def inferParametersOf(self, task):
+ '''
+ @summary: extract the parameter list for the given task
+ @param task: reference to the monitoring task
+ @type task: URIRef
+ @return: an initialized list of the parameters of the task
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ conf:%s task:hasExecutionParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ a ?dim .
+ {
+ ?dim rdfs:subClassOf unit:BaseDimension .
+ } UNION {
+ ?dim rdfs:subClassOf ?p .
+ ?p rdfs:subClassOf unit:DerivedDimension .
+ }
+ OPTIONAL {
+ ?par unit:hasValue ?defval .
+ OPTIONAL {
+ ?par unit:hasUnit ?unit .
+ OPTIONAL {
+ ?par unit:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(task))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferObligatoryParametersOf(self, feature):
+ '''
+ @summary: extract the parameter list for a given feature
+ @param feature: reference to the monitored feature
+ @type feature: URIRef
+ @return: an initialized list of the parameters for this feature
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ feature:%s feature:obligatoryParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(feature))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferOptionalParametersOf(self, feature):
+ '''
+ @summary: extract the parameter list for a given feature
+ @param feature: reference to the monitored feature
+ @type feature: URIRef
+ @return: an initialized list of the parameters for this feature
+ @rtype: ParameterList
+ '''
+ q = """
+SELECT ?name ?ptype ?dim ?defval ?unit ?prefix
+WHERE {
+ feature:%s feature:optionalParameter ?par .
+ ?par param:paramName ?name ;
+ param:hasType ?ptype ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """ % (self.ontology._tail(feature))
+ paramlist = ParameterList()
+ for name, uri_ptype, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+ p = self.translateParameter(str(name), uri_dim, uri_unit, uri_prefix, uri_ptype, default)
+ paramlist.append(p)
+ return paramlist
+
+ def inferFeatureMonitoringParameters(self):
+ '''
+ @summary: extract parameters declared for feature monitoring
+ @return: an iterator over parameters
+ @rtype: (parameter name, dimension, value, unit)
+ '''
+ q = """
+SELECT ?name ?dim ?defval ?unit ?prefix
+WHERE {
+ ?par a feature:FeatureMonitoringParameter ;
+ param:paramName ?name ;
+ param:hasDimension ?dim .
+ OPTIONAL {
+ ?par param:paramValue ?defval .
+ OPTIONAL {
+ ?par param:hasUnit ?unit .
+ OPTIONAL {
+ ?par param:hasPrefix ?prefix .
+ }
+ }
+ }
+}
+ """
+ for name, uri_dim, default, uri_unit, uri_prefix in self.ontology.query(q):
+#FIXME: duplicate (similar thing in translateParameter!!!)
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if default is None:
+ yield str(name), d, "", d.unit
+ else:
+ if uri_unit is None:
+ if uri_prefix is None:
+ u = d.unit
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+ u = self.um[ref]
+ else:
+ if uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ yield str(name), d, str(default), u
+
+ def translateParameter(self, name, uri_dim, uri_unit, uri_prefix, uri_ptype, default = None):
+ '''
+ @summary: helper method to instantiate a Parameter
+ @param name: the reference name of the parameter
+ @type name: str
+ @param uri_dim: the dimension of the parameter
+ @type uri_dim: URIRef
+ @param uri_unit: the unit of the parameter, if None we fall back to the unit of the dimension
+ @type uri_unit: URIRef
+ @param uri_prefix: accounts only if uri_unit is not None
+ @type uri_prefix: URIRef
+ @param uri_ptype: the type of the parameter to use for serialization
+ @type uri_ptype: URIRef
+ @param default: the parameter value to initialize with, if None, parameter won't hold a value
+ @type default: Literal
+ @return: a parameter
+ @rtype: Parameter
+ '''
+ vt = self.typelookup[ self.ontology._tail(uri_ptype) ]
+ d = self.dm[ self.ontology._tail(uri_dim) ]
+ if default is None:
+ return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d)
+ else:
+ if uri_unit is None:
+ if uri_prefix is None:
+ u = d.unit
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), d.unit.reference)
+ u = self.um[ref]
+ else:
+ if uri_prefix is None:
+ u = self.um[ self.ontology._tail(uri_unit) ]
+ else:
+ ref = "%s_%s" % (self.ontology._tail(uri_prefix), self.ontology._tail(uri_unit))
+ u = self.um[ref]
+ return Parameter(name = name, valuetype = vt, unitmanager = self.um, dimension = d, default = (vt(default), u))
diff --git a/Monitoring/src/main/python/Semantics/UnitModel$py.class b/Monitoring/src/main/python/Semantics/UnitModel$py.class
new file mode 100644
index 0000000..8578183
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/UnitModel$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/UnitModel.py b/Monitoring/src/main/python/Semantics/UnitModel.py
new file mode 100644
index 0000000..af8ec74
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/UnitModel.py
@@ -0,0 +1,364 @@
+'''
+Created on Feb 12, 2012
+
+@author: steger
+'''
+
+class UnitModel(object):
+ '''
+ @summary: an interface to infer prefix, unit and dimension related information from the model
+ '''
+
+ def __init__(self, ontology):
+ '''
+ @summary: constructor
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+
+ def inferPrefixes(self):
+ '''
+ @summary: iterate over all prefixes defined in the model.
+ @return: a generator of the prefix details: (reference, symbol, base, exponent)
+ @rtype: (URIRef, str, int, int)
+ @todo: in case the unit:base is not present in a Prefix individual,
+ we should fall back to the restriction on the base defined for the given sibling of the Prefix.
+ This sibling is referenced ?basegroup in the query.
+ '''
+ q = """
+SELECT ?prefix ?symbol ?base ?exponent
+WHERE {
+ ?prefix a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a ?basegroup ;
+ unit:exponent ?exponent ;
+ unit:base ?base .
+ ?basegroup rdfs:subClassOf unit:Prefix .
+ OPTIONAL {
+ ?prefix unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_prefix, symbol, base, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_prefix, self.ontology._tail(uri_prefix), int(base), int(exponent)
+ else:
+ yield uri_prefix, str(symbol), int(base), int(exponent)
+
+ def inferPrefixSymbolOf(self, prefixuri):
+ '''
+ @summary: generates an short written form of a unit prefix if unit:symbol is present in the model,
+ otherwise an abbreviation is derived from the tail of the uri (the reference name to the individual).
+ @param prefixuri: the uri reference to the unit prefix
+ @type prefixuri: URIRef
+ @return: the short form
+ @rtype: str
+ '''
+ try:
+ _, _, symbol = self.ontology.graph.triples((prefixuri, self.ontology.ns('unit')['symbol'], None)).next()
+ return str(symbol)
+ except StopIteration:
+ return self.ontology._tail(prefixuri)
+
+
+ def inferBaseUnits(self):
+ '''
+ @summary: iterate over all BaseUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol)
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?unit ?symbol
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:BaseUnit .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit)
+ else:
+ yield uri_unit, str(symbol)
+
+ def inferPowerUnits(self):
+ '''
+ @summary: iterate over all PowerUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, powerof, exponent)
+ @rtype: (URIRef, str, URIRef, int)
+ '''
+ q = """
+SELECT ?unit ?symbol ?powerof ?exponent
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:PowerUnit ;
+ unit:exponent ?exponent ;
+ unit:derivedFrom ?powerof .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_powerof, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_powerof, int(exponent)
+ else:
+ yield uri_unit, str(symbol), uri_powerof, int(exponent)
+
+ def inferProductUnits(self):
+ '''
+ @summary: iterate over all ProductUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, productof)
+ @rtype: (URIRef, str, set(URIRef))
+ '''
+ q = """
+SELECT ?unit ?symbol ?productof
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:ProductUnit ;
+ unit:derivedFrom ?productof
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ container = {}
+ for uri_unit, symbol, uri_productof in self.ontology.query(q):
+ if symbol is None:
+ key = uri_unit, self.ontology_tail(uri_unit)
+ else:
+ key = uri_unit, str(symbol)
+ if not container.has_key(key):
+ container[key] = set()
+ container[key].add(uri_productof)
+ for (uri_unit, symbol), productof in container.iteritems():
+ yield uri_unit, symbol, productof
+
+ def inferLinearTransformedUnits(self):
+ '''
+ @summary: iterate over all LinearTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, scale, offset)
+ @rtype: (URIRef, str, URIRef, float, float)
+ '''
+ q = """
+SELECT ?unit ?symbol ?scale ?offset ?derivedfrom
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:LinearTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:scale ?scale .
+ OPTIONAL {
+ ?unit unit:offset ?offset .
+ }
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, scale, offset, uri_derivedfrom in self.ontology.query(q):
+ if offset is None:
+ offset = 0
+ else:
+ offset = self.ontology._float(offset)
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, self.ontology._float(scale), offset
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, self.ontology._float(scale), offset
+
+ def inferRegexpTransformedUnits(self):
+ '''
+ @summary: iterate over all RegexpTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, expr_fwd, expr_inv)
+ @rtype: (URIRef, str, URIRef, str, str)
+ '''
+ q = """
+SELECT ?unit ?symbol ?derivedfrom ?fwd ?inv
+WHERE {
+ ?unit a <http://www.w3.org/2002/07/owl#NamedIndividual> ;
+ a unit:RegexpTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:forwardExpression ?fwd ;
+ unit:inverseExpression ?inv .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_derivedfrom, expr_fwd, expr_inv in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+
+ def inferPossiblePrefixesOf(self, uri_unit):
+ '''
+ @summary: extract possible prefixes for the given unit
+ @param unit: reference to the unit
+ @type unit: URIRef
+ @return: a generator over the references of the possible unit prefixes
+ @rtype: URIRef
+ '''
+ for _, _, uri_prefix in self.ontology.graph.triples((uri_unit, self.ontology.ns('unit')['possiblePrefix'], None)):
+ yield uri_prefix
+
+ def inferBaseDimensions(self):
+ '''
+ @summary: extract BaseDimensions and their corresponding units from the model
+ @return: a generator of the BaseDimension details: (reference, unit, level)
+ @rtype: (URIRef, URIRef, str)
+ '''
+ q = """
+SELECT ?dimension ?unit ?level
+WHERE {
+ ?dimension rdfs:subClassOf unit:BaseDimension ;
+ rdfs:subClassOf ?constraint ;
+ rdfs:subClassOf ?level .
+ ?constraint owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ FILTER regex(?level, "Level") .
+}
+ """
+ for uri_dimension, uri_unit, level in self.ontology.query(q):
+ yield uri_dimension, uri_unit, level
+
+ def inferDifferenceDimensions(self):
+ '''
+ @summary: extract DifferenceDimensions and their corresponding units from the model
+ @return: a generator of the DifferenceDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:DifferenceDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+ def inferPowerDimensions(self):
+ '''
+ @summary: extract PowerDimensions and their corresponding units from the model
+ @return: a generator of the PowerDimension details: (reference, unit, derivedfrom, exponent)
+ @rtype: (URIRef, URIRef, URIRef, int)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom ?exponent
+WHERE {
+ ?dimension rdfs:subClassOf unit:PowerDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 ;
+ rdfs:subClassOf ?constraint3 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+ ?constraint3 owl:onProperty unit:exponent ;
+ owl:hasValue ?exponent .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom, exponent in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom, int(exponent)
+
+ def inferProductDimensions(self):
+ '''
+ @summary: extract ProductDimensions and their corresponding units from the model
+ @return: a generator of the ProductDimension details: (reference, unit, set of derivedfrom references)
+ @rtype: (URIRef, URIRef, tuple(URIRef))
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:ProductDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ container = {}
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ if not container.has_key(uri_dimension):
+ container[uri_dimension] = (uri_unit, set())
+ container[uri_dimension][1].add(uri_derivedfrom)
+ for uri_dimension, (uri_unit, set_derivedfrom) in container.iteritems():
+ yield uri_dimension, uri_unit, tuple(set_derivedfrom)
+
+ def inferRatioDimensions(self):
+ '''
+ @summary: extract RatioDimensions and their corresponding units from the model
+ @return: a generator of the RatioDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:RatioDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#OBSOLETED:
+ def inferUnitSymbolOf(self, unituri, prefixuri = None):
+ raise Exception("OBSOLETE")
+# '''
+# @summary: generates an short written form of a unit if symbols are present for the prefix (optional) and the unit itself.
+# If either of them is missing abbreviation is derived from the tail of the uri (name of the individual).
+# @param unituri: the unit
+# @type unituri: URIRef
+# @param prefixuri: the prefix (optional)
+# @type prefixuri: URIRef or None
+# @return: the short form
+# @rtype: str
+# '''
+# if prefixuri is None:
+# prefix = ""
+# else:
+# try:
+# _, _, symbol = self.graph.triples((prefixuri, self.ns('unit')['symbol'], None)).next()
+# prefix = str(symbol)
+# except StopIteration:
+# prefix = self._tail(prefixuri)
+# try:
+# _, _, symbol = self.graph.triples((unituri, self.ns('unit')['symbol'], None)).next()
+# return "%s%s" % (prefix, str(symbol))
+# except StopIteration:
+# return "%s%s" % (prefix, self._tail(unituri))
+
diff --git a/Monitoring/src/main/python/Semantics/UnitModel.py.old b/Monitoring/src/main/python/Semantics/UnitModel.py.old
new file mode 100644
index 0000000..2906615
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/UnitModel.py.old
@@ -0,0 +1,364 @@
+'''
+Created on Feb 12, 2012
+
+@author: steger
+'''
+
+class UnitModel(object):
+ '''
+ @summary: an interface to infer prefix, unit and dimension related information from the model
+ '''
+
+ def __init__(self, ontology):
+ '''
+ @summary: constructor
+ @param ontology: the basic knowledge
+ @type ontology: Ontology
+ '''
+ self.ontology = ontology
+
+ def inferPrefixes(self):
+ '''
+ @summary: iterate over all prefixes defined in the model.
+ @return: a generator of the prefix details: (reference, symbol, base, exponent)
+ @rtype: (URIRef, str, int, int)
+ @todo: in case the unit:base is not present in a Prefix individual,
+ we should fall back to the restriction on the base defined for the given sibling of the Prefix.
+ This sibling is referenced ?basegroup in the query.
+ '''
+ q = """
+SELECT ?prefix ?symbol ?base ?exponent
+WHERE {
+ ?prefix a owl:NamedIndividual ;
+ a ?basegroup ;
+ unit:exponent ?exponent ;
+ unit:base ?base .
+ ?basegroup rdfs:subClassOf unit:Prefix .
+ OPTIONAL {
+ ?prefix unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_prefix, symbol, base, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_prefix, self.ontology._tail(uri_prefix), int(base), int(exponent)
+ else:
+ yield uri_prefix, str(symbol), int(base), int(exponent)
+
+ def inferPrefixSymbolOf(self, prefixuri):
+ '''
+ @summary: generates an short written form of a unit prefix if unit:symbol is present in the model,
+ otherwise an abbreviation is derived from the tail of the uri (the reference name to the individual).
+ @param prefixuri: the uri reference to the unit prefix
+ @type prefixuri: URIRef
+ @return: the short form
+ @rtype: str
+ '''
+ try:
+ _, _, symbol = self.ontology.graph.triples((prefixuri, self.ontology.ns('unit')['symbol'], None)).next()
+ return str(symbol)
+ except StopIteration:
+ return self.ontology._tail(prefixuri)
+
+
+ def inferBaseUnits(self):
+ '''
+ @summary: iterate over all BaseUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol)
+ @rtype: (URIRef, str)
+ '''
+ q = """
+SELECT ?unit ?symbol
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:BaseUnit .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit)
+ else:
+ yield uri_unit, str(symbol)
+
+ def inferPowerUnits(self):
+ '''
+ @summary: iterate over all PowerUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, powerof, exponent)
+ @rtype: (URIRef, str, URIRef, int)
+ '''
+ q = """
+SELECT ?unit ?symbol ?powerof ?exponent
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:PowerUnit ;
+ unit:exponent ?exponent ;
+ unit:derivedFrom ?powerof .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_powerof, exponent in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_powerof, int(exponent)
+ else:
+ yield uri_unit, str(symbol), uri_powerof, int(exponent)
+
+ def inferProductUnits(self):
+ '''
+ @summary: iterate over all ProductUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, productof)
+ @rtype: (URIRef, str, set(URIRef))
+ '''
+ q = """
+SELECT ?unit ?symbol ?productof
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:ProductUnit ;
+ unit:derivedFrom ?productof
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ container = {}
+ for uri_unit, symbol, uri_productof in self.ontology.query(q):
+ if symbol is None:
+ key = uri_unit, self.ontology_tail(uri_unit)
+ else:
+ key = uri_unit, str(symbol)
+ if not container.has_key(key):
+ container[key] = set()
+ container[key].add(uri_productof)
+ for (uri_unit, symbol), productof in container.iteritems():
+ yield uri_unit, symbol, productof
+
+ def inferLinearTransformedUnits(self):
+ '''
+ @summary: iterate over all LinearTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, scale, offset)
+ @rtype: (URIRef, str, URIRef, float, float)
+ '''
+ q = """
+SELECT ?unit ?symbol ?scale ?offset ?derivedfrom
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:LinearTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:scale ?scale .
+ OPTIONAL {
+ ?unit unit:offset ?offset .
+ }
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, scale, offset, uri_derivedfrom in self.ontology.query(q):
+ if offset is None:
+ offset = 0
+ else:
+ offset = self.ontology._float(offset)
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, self.ontology._float(scale), offset
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, self.ontology._float(scale), offset
+
+ def inferRegexpTransformedUnits(self):
+ '''
+ @summary: iterate over all RegexpTransformedUnits defined in the model.
+ @return: a generator of the unit details: (reference, symbol, derivedfrom, expr_fwd, expr_inv)
+ @rtype: (URIRef, str, URIRef, str, str)
+ '''
+ q = """
+SELECT ?unit ?symbol ?derivedfrom ?fwd ?inv
+WHERE {
+ ?unit a owl:NamedIndividual ;
+ a unit:RegexpTransformedUnit ;
+ unit:derivedFrom ?derivedfrom ;
+ unit:forwardExpression ?fwd ;
+ unit:inverseExpression ?inv .
+ OPTIONAL {
+ ?unit unit:symbol ?symbol .
+ }
+}
+ """
+ for uri_unit, symbol, uri_derivedfrom, expr_fwd, expr_inv in self.ontology.query(q):
+ if symbol is None:
+ yield uri_unit, self.ontology._tail(uri_unit), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+ else:
+ yield uri_unit, str(symbol), uri_derivedfrom, str(expr_fwd), str(expr_inv)
+
+ def inferPossiblePrefixesOf(self, uri_unit):
+ '''
+ @summary: extract possible prefixes for the given unit
+ @param unit: reference to the unit
+ @type unit: URIRef
+ @return: a generator over the references of the possible unit prefixes
+ @rtype: URIRef
+ '''
+ for _, _, uri_prefix in self.ontology.graph.triples((uri_unit, self.ontology.ns('unit')['possiblePrefix'], None)):
+ yield uri_prefix
+
+ def inferBaseDimensions(self):
+ '''
+ @summary: extract BaseDimensions and their corresponding units from the model
+ @return: a generator of the BaseDimension details: (reference, unit, level)
+ @rtype: (URIRef, URIRef, str)
+ '''
+ q = """
+SELECT ?dimension ?unit ?level
+WHERE {
+ ?dimension rdfs:subClassOf unit:BaseDimension ;
+ rdfs:subClassOf ?constraint ;
+ rdfs:subClassOf ?level .
+ ?constraint owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ FILTER regex(?level, "Level") .
+}
+ """
+ for uri_dimension, uri_unit, level in self.ontology.query(q):
+ yield uri_dimension, uri_unit, level
+
+ def inferDifferenceDimensions(self):
+ '''
+ @summary: extract DifferenceDimensions and their corresponding units from the model
+ @return: a generator of the DifferenceDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:DifferenceDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+ def inferPowerDimensions(self):
+ '''
+ @summary: extract PowerDimensions and their corresponding units from the model
+ @return: a generator of the PowerDimension details: (reference, unit, derivedfrom, exponent)
+ @rtype: (URIRef, URIRef, URIRef, int)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom ?exponent
+WHERE {
+ ?dimension rdfs:subClassOf unit:PowerDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 ;
+ rdfs:subClassOf ?constraint3 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+ ?constraint3 owl:onProperty unit:exponent ;
+ owl:hasValue ?exponent .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom, exponent in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom, int(exponent)
+
+ def inferProductDimensions(self):
+ '''
+ @summary: extract ProductDimensions and their corresponding units from the model
+ @return: a generator of the ProductDimension details: (reference, unit, set of derivedfrom references)
+ @rtype: (URIRef, URIRef, tuple(URIRef))
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:ProductDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ container = {}
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ if not container.has_key(uri_dimension):
+ container[uri_dimension] = (uri_unit, set())
+ container[uri_dimension][1].add(uri_derivedfrom)
+ for uri_dimension, (uri_unit, set_derivedfrom) in container.iteritems():
+ yield uri_dimension, uri_unit, tuple(set_derivedfrom)
+
+ def inferRatioDimensions(self):
+ '''
+ @summary: extract RatioDimensions and their corresponding units from the model
+ @return: a generator of the RatioDimension details: (reference, unit, derivedfrom)
+ @rtype: (URIRef, URIRef, URIRef)
+ '''
+ q = """
+SELECT ?dimension ?unit ?derivedFrom
+WHERE {
+ ?dimension rdfs:subClassOf unit:RatioDimension ;
+ rdfs:subClassOf ?constraint1 ;
+ rdfs:subClassOf ?constraint2 .
+ ?constraint1 owl:onProperty unit:defaultUnit ;
+ owl:hasValue ?unit .
+ ?constraint2 owl:onProperty unit:derivedFrom ;
+ owl:onClass ?derivedFrom .
+}
+ """
+ for uri_dimension, uri_unit, uri_derivedfrom in self.ontology.query(q):
+ yield uri_dimension, uri_unit, uri_derivedfrom
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#OBSOLETED:
+ def inferUnitSymbolOf(self, unituri, prefixuri = None):
+ raise Exception("OBSOLETE")
+# '''
+# @summary: generates an short written form of a unit if symbols are present for the prefix (optional) and the unit itself.
+# If either of them is missing abbreviation is derived from the tail of the uri (name of the individual).
+# @param unituri: the unit
+# @type unituri: URIRef
+# @param prefixuri: the prefix (optional)
+# @type prefixuri: URIRef or None
+# @return: the short form
+# @rtype: str
+# '''
+# if prefixuri is None:
+# prefix = ""
+# else:
+# try:
+# _, _, symbol = self.graph.triples((prefixuri, self.ns('unit')['symbol'], None)).next()
+# prefix = str(symbol)
+# except StopIteration:
+# prefix = self._tail(prefixuri)
+# try:
+# _, _, symbol = self.graph.triples((unituri, self.ns('unit')['symbol'], None)).next()
+# return "%s%s" % (prefix, str(symbol))
+# except StopIteration:
+# return "%s%s" % (prefix, self._tail(unituri))
+
diff --git a/Monitoring/src/main/python/Semantics/__init__$py.class b/Monitoring/src/main/python/Semantics/__init__$py.class
new file mode 100644
index 0000000..f47471b
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Semantics/__init__.py b/Monitoring/src/main/python/Semantics/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/__init__.py
diff --git a/Monitoring/src/main/python/Semantics/__init__.py.old b/Monitoring/src/main/python/Semantics/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/__init__.py.old
diff --git a/Monitoring/src/main/python/Semantics/a b/Monitoring/src/main/python/Semantics/a
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/a
diff --git a/Monitoring/src/main/python/Semantics/b b/Monitoring/src/main/python/Semantics/b
new file mode 100644
index 0000000..7b8569a
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/b
@@ -0,0 +1,55 @@
+EEEEE
+======================================================================
+ERROR: test_IM_domainsfeatures (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_IM_task (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_managers (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_owlexamples (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+======================================================================
+ERROR: test_taskBYuri (__main__.Test)
+----------------------------------------------------------------------
+Traceback (most recent call last):
+ File "test.py", line 28, in setUp
+ dim_ipaddress = self.MS_planetlab.dm['IPAddress']
+ File "/home/laki/work/NOVI/MonitoringJYTHON_130118/Mon2/DataProcessing/Dimension.py", line 302, in __getitem__
+ raise DimensionError("Dimension with reference %s not found" % reference)
+DimensionError: Dimension with reference IPAddress not found
+
+----------------------------------------------------------------------
+Ran 5 tests in 0.014s
+
+FAILED (errors=5)
diff --git a/Monitoring/src/main/python/Semantics/test.py b/Monitoring/src/main/python/Semantics/test.py
new file mode 100644
index 0000000..70d335d
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/test.py
@@ -0,0 +1,263 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import site
+site.addsitedir('../site-packages')
+
+
+import unittest2
+from rdflib import Graph
+from Example.Metrics import RoundTripDelay
+from Example.Tools import sonomashortping
+from DataProcessing.Parameter import ParameterList, Parameter
+from Example.credentials import ple_credentials
+from Example.Platforms import FRAMEWORK
+from DataProcessing.DataHeaderCell import CellRequestByName
+from DataProcessing.DataError import SamplerError
+from DataProcessing.DataReader import DataReader
+import pkgutil
+import StringIO
+import monitoringmodel.im
+import os.path
+
+
class Test(unittest2.TestCase):
    """Integration tests for the Semantics layer.

    Exercises the unit/prefix/dimension managers, the information model
    (domains, features, task lookup) and runs measurement tasks end to end
    against the monitoring service instances obtained from FRAMEWORK.
    NOTE(review): these are live integration tests; they require the
    PlanetLab/FEDERICA service configuration and network access.
    """

    def setUp(self):
        # Monitoring service endpoints for the two testbeds; the tests below
        # only exercise the PlanetLab instance.
        self.MS_planetlab = FRAMEWORK.getService('PlanetLab')
        self.MS_federica = FRAMEWORK.getService('FEDERICA')

        # Node address parameters: each default is a (value, unit) pair.
        dim_ipaddress = self.MS_planetlab.dm['IPAddress']
        node = ("157.181.175.243", self.MS_planetlab.um.ipv4dotted)
        self.p_src_eltenode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
        node = ("147.102.22.66", self.MS_planetlab.um.ipv4dotted)
        self.p_dst_ntuanode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
        node = ("192.168.31.1", self.MS_planetlab.um.ipv4dotted)
        self.p_src_fednode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
        node = ("192.168.31.9", self.MS_planetlab.um.ipv4dotted)
        self.p_dst_fednode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)

        self.substrate = self.MS_planetlab.ontology.ns('task')['Substrate']
        self.slice = self.MS_planetlab.ontology.ns('task')['Slice']

        # feature name -> (tools expected on the substrate, tools expected in a slice)
        self.feat_task = {
          'OnewayDelay': (['SONoMAChirp'], []),
          'RoundtripDelay': (['SONoMAPing'], ['sshpingSlice']),
          'AvailableBandwidth': ([], ['sshabSlice']),
          'AvailableMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
          'FreeMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
          'MemoryUtilization': (['sshMeminfo'], ['sshMeminfoSlice']),
          'Uptime': (['sshuptime'], ['sshuptimeSlice']),
          'CPULoad': (['sshcpuload'], ['sshcpuloadSlice']),
          'CPUCores': (['sshcpuload'], ['sshcpuloadSlice']),
          'CPUUtilization': (['sshcpuload'], ['sshcpuloadSlice']),
          'FreeDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice']),
          'UsedDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice'])
          }

        # Common parameters reused by the slice-oriented measurement tasks.
        dim_nameofsomething = self.MS_planetlab.dm['NameOfSomething']
        self.slicename = Parameter(name = "SliceName", valuetype = str,
                                   unitmanager = self.MS_planetlab.um, dimension = dim_nameofsomething,
                                   default = ('novi_novi', self.MS_planetlab.um.unitless))
        dim_countable = self.MS_planetlab.dm['Countable']
        self.count = Parameter(name = 'Count', valuetype = int,
                               unitmanager = self.MS_planetlab.um, dimension = dim_countable,
                               default = (5, self.MS_planetlab.um.piece))

    def test_managers(self):
        """Check the cardinalities inferred from the unit and dimension models."""
        expect = 14
        infer = len(self.MS_planetlab.pm)
        self.assertEqual(infer, expect, "Prefix: got %d expect %d" % (infer, expect))

        expect = 10
        infer = [ s for _, s in self.MS_planetlab.unitmodel.inferBaseUnits() ]
        self.assertEqual(expect, len(infer), "BaseUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for _, d, _ in self.MS_planetlab.unitmodel.inferProductUnits() ]
        self.assertEqual(expect, len(infer), "ProductUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for _, d, _, _ in self.MS_planetlab.unitmodel.inferPowerUnits() ]
        self.assertEqual(expect, len(infer), "PowerUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 12
        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferLinearTransformedUnits() ]
        self.assertEqual(expect, len(infer), "LinearTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 2
        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferRegexpTransformedUnits() ]
        self.assertEqual(expect, len(infer), "RegexpTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 8
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferBaseDimensions() ]
        self.assertEqual(expect, len(infer), "BaseDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferDifferenceDimensions() ]
        self.assertEqual(expect, len(infer), "DifferenceDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _, _ in self.MS_planetlab.unitmodel.inferPowerDimensions() ]
        self.assertEqual(expect, len(infer), "PowerDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferProductDimensions() ]
        self.assertEqual(expect, len(infer), "ProductDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 4
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferRatioDimensions() ]
        self.assertEqual(expect, len(infer), "RatioDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))


        # Each listed unit must allow the expected number of metric prefixes.
        NS = self.MS_planetlab.ontology.ns('unit')
        for expect, uri in [(4, NS['second']), (7, NS['Byte']), (3, NS['bit']), (1, NS['unixtimestamp'])]:
            infer = [s for s in self.MS_planetlab.unitmodel.inferPossiblePrefixesOf(uri)]
            self.assertEqual(expect, len(infer), "inferPossiblePrefixesOf: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))



    def test_IM_domainsfeatures(self):
        """Check the domains and the feature count inferred from the task model."""
        expect = set(['Slice', 'Substrate'])
        infer = set([ self.MS_planetlab.ontology._tail(x) for x in self.MS_planetlab.taskmodel.inferDomains() ])
        self.assertEqual(expect, infer, "inferDomains: expect %d, got %d\n%s" % (len(expect), len(infer), str(infer)))

        expect = len(self.feat_task) #19 features exist, but some of them have no name
        infer = [ x for x in self.MS_planetlab.taskmodel.inferFeatures()]
        self.assertEqual(expect, len(infer), "inferFeatures: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

    def test_IM_task(self):
        """Compare task metadata inferred from the ontology with the hand-written Example definitions."""
        for feat, (t_subst, t_slice) in self.feat_task.iteritems():
            feature = self.MS_planetlab.ontology.ns('feature')[feat]
            infer_t_subst = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.substrate, feature)]
            infer_t_slice = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.slice, feature)]
            self.assertEqual(infer_t_subst, t_subst, "feature: %s searchtask (substrate): expect %s, got %s" % (feat, t_subst, infer_t_subst))
            self.assertEqual(infer_t_slice, t_slice, "feature: %s searchtask (slice): expect %s, got %s" % (feat, t_slice, infer_t_slice))

        # Use the SONoMA ping task as the reference tool description.
        task = self.MS_planetlab.ontology.ns('conf')['T_SONoMAPing']
        infer = self.MS_planetlab.taskmodel.inferCredentialOf(task)
        expect = set(sonomashortping.authtype)
        self.assertEqual(infer, expect, "credentials differ expect: %s got: %s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferDriverOf(task)
        expect = sonomashortping.driver
        self.assertEqual(infer, expect, "drivers differ expect: %s got: %s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferHookparametersOf(task)
        expect = sonomashortping.kwargs
        self.assertEqual(infer, expect, "hook parameters differ expect: %s got: %s" % (expect, infer))

        H = self.MS_planetlab.taskmodel.inferHookdefinitionsOf(task)
        for k, h in H.iteritems():
            exp = sonomashortping.hooks[k].strip()
            h = h.strip()
            self.assertEqual(h, exp, "%s hook differs\nexpect:\n%s\ngot:\n%s" % (k, exp, h))

        #TODO: check feature equality
        infer = [ (c.name, str(c._unit), str(c._dimension)) for c in self.MS_planetlab.taskmodel.inferDataheaderOf(task) ]
        expect = [ (c.name, str(c._unit), str(c._dimension)) for c in sonomashortping.dataheaderdeclaration ]
        self.assertEqual(infer, expect, "output header declarations differ expect:\n%s\ngot:\n%s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferParametersOf(task)
        expect = sonomashortping.parameters
        n_inf, n_exp = set(infer.parameter_names()), set(expect.parameter_names())
        self.assertEqual(n_inf, n_exp, "runtime parameters differ expect: %s got: %s" %(n_exp, n_inf))
        for k, p in expect.parameter.iteritems():
            inf_v = infer.parameter[k].value
            exp_v = p.value
            if exp_v is None:
                self.assertFalse(inf_v, "Expected uninitialized value, got %s" % inf_v)
            else:
                # Compare (value, unit) with the unit stringified, since unit
                # objects coming from different managers may not be identical.
                inf_v = (inf_v[0], str(inf_v[1]))
                exp_v = (exp_v[0], str(exp_v[1]))
                self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))

        feature = self.MS_planetlab.ontology.ns('feature')['RoundtripDelay']
        expect = RoundTripDelay.p_obligatory
        infer = self.MS_planetlab.taskmodel.inferObligatoryParametersOf(feature)
        self.assertEqual(len(expect), len(infer), "obligatory parameters for %s differ expect: %s got: %s" % (feature, expect.parameter_names(), infer.parameter_names()))
        for k, p in expect.parameter.iteritems():
            inf_v = infer.parameter[k].value
            exp_v = p.value
            inf_v = (inf_v[0], str(inf_v[1]))
            exp_v = (exp_v[0], str(exp_v[1]))
            self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))

    def test_taskBYuri(self):
        """Launch each configured task by its URI and expect a non-empty result within the wait budget."""
        cases = {
            'T_SSHPingSlice': [self.p_src_eltenode, self.slicename, self.count, self.p_dst_ntuanode],
            'T_SSHMemInfo': [self.p_src_eltenode],
            'T_SSHMemInfoSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHCPULoad': [self.p_src_eltenode],
            'T_SSHCPULoadSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHUptime': [self.p_src_eltenode],
            'T_SSHUptimeSlice': [self.p_src_eltenode, self.slicename],
#            'T_SONoMAPing': [self.p_src_eltenode, self.p_dst_ntuanode],
#            'T_hadesaggregate': [self.p_src_fednode, self.p_dst_fednode],
            }
        for l,p in cases.iteritems():
            task_uri = self.MS_planetlab.ontology.ns('conf')[l]
            _, task = self.MS_planetlab.newTask(task = task_uri,
                                                cred = ple_credentials,
                                                resource = None,
                                                parameters = ParameterList(p))
            task.enable()
            # Wait at most 15 s for the first data record to arrive.
            task.dataAdded.wait( 15 )
            self.assertGreater(len(task.data), 0, "measurement %s yielded empty result" % l)
#            print task.data._rawrecords


    def test_owlexamples(self):
        """Parse the example query document, run the inferred tasks and validate the aggregators."""
        # NOTE(review): 'doc' is never used below; leftover from the pre-Jython code path.
        doc = "../monitoringmodel/monitoringQuery_example.owl" #% self.MS_planetlab.ontology.baseurl
        # JYTHON hack for accessing owl files
        im = monitoringmodel.im.im()
        path = im.path
        loader = pkgutil.get_loader("monitoringmodel.im")
        g = Graph()
        g.parse(source = StringIO.StringIO(loader.get_data(os.path.join(path, "monitoringQuery_example.owl"))))
        print str(g)
        qdict = self.MS_planetlab.QI.inferBundleQueries(qgraph = g)
        self.assertTrue(len(qdict), "Got empty query")
        for q in qdict:
            domain = self.MS_planetlab.ontology.ns('task')['Substrate']
            taskgen = self.MS_planetlab.taskmodel.inferTasks(domain, q.feature)
            #we are ugly here: use the first tool
            for task_uri, _ in taskgen: break
            _, task = self.MS_planetlab.newTask(task = task_uri,
                                                cred = ple_credentials,
                                                resource = q.resource,
                                                parameters = q.paramlist)
            del task.strategy # make sure STRAT_ONDEMAND
            task.enable()
            task.dataAdded.wait( 15 )
            task.dataAdded.clear()
            if q.samplechain:
                flow = []
                for skeleton, parlist in q.samplechain:
                    flow.append((skeleton, parlist.formkeyvaldict()))
                aid = self.MS_planetlab.am.newAggregator(task.data, CellRequestByName(name = 'Free Memory'), flow)
                A = self.MS_planetlab.am[ aid ]
                # Keep measuring until the aggregator has enough samples;
                # SamplerError signals that more data is still needed.
                while True:
                    try:
                        s, a = A.data._rawrecords[0]
                        self.assertEqual(s, len(task.data), "inconsistency in length len(data)=%d, max of %d samples?" % (len(task.data), s))
                        R = DataReader(datasource = task.data)
                        R.extract(cellrequest = [CellRequestByName(name = 'Free Memory')])
                        expect = max( [ float(x) for x, in R ] )
                        self.assertEqual(expect, a, "inconsistency in aggregare %f <> %f" % (expect, a))
                        break
                    except SamplerError:
                        print "MEASURE SOME MORE ..."
                        task.disable()
                        task.enable()
                        task.dataAdded.wait( 15 )
                        task.dataAdded.clear()
            self.assertGreater(len(task.data), 0, "measurement yielded empty result")
+
+
+
if __name__ == "__main__":
    # Uncomment to narrow the run to a single test case:
    #import sys;sys.argv = ['', 'Test.test_IM_domainsfeatures']
    unittest2.main()
diff --git a/Monitoring/src/main/python/Semantics/test.py.old b/Monitoring/src/main/python/Semantics/test.py.old
new file mode 100644
index 0000000..2e31cb9
--- /dev/null
+++ b/Monitoring/src/main/python/Semantics/test.py.old
@@ -0,0 +1,250 @@
+'''
+Created on Aug 10, 2011
+
+@author: steger
+'''
+import unittest
+from rdflib import Graph
+from Example.Metrics import RoundTripDelay
+from Example.Tools import sonomashortping
+from DataProcessing.Parameter import ParameterList, Parameter
+from Example.credentials import ple_credentials
+from Example.Platforms import FRAMEWORK
+from DataProcessing.DataHeaderCell import CellRequestByName
+from DataProcessing.DataError import SamplerError
+from DataProcessing.DataReader import DataReader
+
+
class Test(unittest.TestCase):
    """Integration tests for the Semantics layer (pre-Jython variant).

    Same coverage as Semantics/test.py, but built on plain unittest and
    reading the example OWL document via its URL instead of pkgutil.
    NOTE(review): live integration tests; they require the PlanetLab and
    FEDERICA service configuration and network access.
    """

    def setUp(self):
        # Monitoring service endpoints for the two testbeds; the tests below
        # only exercise the PlanetLab instance.
        self.MS_planetlab = FRAMEWORK.getService('PlanetLab')
        self.MS_federica = FRAMEWORK.getService('FEDERICA')

        # Node address parameters: each default is a (value, unit) pair.
        dim_ipaddress = self.MS_planetlab.dm['IPAddress']
        node = ("157.181.175.243", self.MS_planetlab.um.ipv4dotted)
        self.p_src_eltenode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
        node = ("147.102.22.66", self.MS_planetlab.um.ipv4dotted)
        self.p_dst_ntuanode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
        node = ("192.168.31.1", self.MS_planetlab.um.ipv4dotted)
        self.p_src_fednode = Parameter(name = "SourceAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)
        node = ("192.168.31.9", self.MS_planetlab.um.ipv4dotted)
        self.p_dst_fednode = Parameter(name = "DestinationAddress", valuetype = str, unitmanager = self.MS_planetlab.um, dimension = dim_ipaddress, default = node)

        self.substrate = self.MS_planetlab.ontology.ns('task')['Substrate']
        self.slice = self.MS_planetlab.ontology.ns('task')['Slice']

        # feature name -> (tools expected on the substrate, tools expected in a slice)
        self.feat_task = {
          'OnewayDelay': (['SONoMAChirp'], []),
          'RoundtripDelay': (['SONoMAPing'], ['sshpingSlice']),
          'AvailableBandwidth': ([], ['sshabSlice']),
          'AvailableMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
          'FreeMemory': (['sshMeminfo'], ['sshMeminfoSlice']),
          'MemoryUtilization': (['sshMeminfo'], ['sshMeminfoSlice']),
          'Uptime': (['sshuptime'], ['sshuptimeSlice']),
          'CPULoad': (['sshcpuload'], ['sshcpuloadSlice']),
          'CPUCores': (['sshcpuload'], ['sshcpuloadSlice']),
          'CPUUtilization': (['sshcpuload'], ['sshcpuloadSlice']),
          'FreeDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice']),
          'UsedDiskSpace': (['sshdiskinfo'], ['sshdiskinfoSlice'])
          }

        # Common parameters reused by the slice-oriented measurement tasks.
        dim_nameofsomething = self.MS_planetlab.dm['NameOfSomething']
        self.slicename = Parameter(name = "SliceName", valuetype = str,
                                   unitmanager = self.MS_planetlab.um, dimension = dim_nameofsomething,
                                   default = ('novi_novi', self.MS_planetlab.um.unitless))
        dim_countable = self.MS_planetlab.dm['Countable']
        self.count = Parameter(name = 'Count', valuetype = int,
                               unitmanager = self.MS_planetlab.um, dimension = dim_countable,
                               default = (5, self.MS_planetlab.um.piece))

    def test_managers(self):
        """Check the cardinalities inferred from the unit and dimension models."""
        expect = 14
        infer = len(self.MS_planetlab.pm)
        self.assertEqual(infer, expect, "Prefix: got %d expect %d" % (infer, expect))

        expect = 10
        infer = [ s for _, s in self.MS_planetlab.unitmodel.inferBaseUnits() ]
        self.assertEqual(expect, len(infer), "BaseUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for _, d, _ in self.MS_planetlab.unitmodel.inferProductUnits() ]
        self.assertEqual(expect, len(infer), "ProductUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for _, d, _, _ in self.MS_planetlab.unitmodel.inferPowerUnits() ]
        self.assertEqual(expect, len(infer), "PowerUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 12
        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferLinearTransformedUnits() ]
        self.assertEqual(expect, len(infer), "LinearTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 2
        infer = [ d for _, d, _, _, _ in self.MS_planetlab.unitmodel.inferRegexpTransformedUnits() ]
        self.assertEqual(expect, len(infer), "RegexpTransformedUnit: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 8
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferBaseDimensions() ]
        self.assertEqual(expect, len(infer), "BaseDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferDifferenceDimensions() ]
        self.assertEqual(expect, len(infer), "DifferenceDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _, _ in self.MS_planetlab.unitmodel.inferPowerDimensions() ]
        self.assertEqual(expect, len(infer), "PowerDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 1
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferProductDimensions() ]
        self.assertEqual(expect, len(infer), "ProductDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

        expect = 4
        infer = [ d for d, _, _ in self.MS_planetlab.unitmodel.inferRatioDimensions() ]
        self.assertEqual(expect, len(infer), "RatioDimension: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))


        # Each listed unit must allow the expected number of metric prefixes.
        NS = self.MS_planetlab.ontology.ns('unit')
        for expect, uri in [(4, NS['second']), (7, NS['Byte']), (3, NS['bit']), (1, NS['unixtimestamp'])]:
            infer = [s for s in self.MS_planetlab.unitmodel.inferPossiblePrefixesOf(uri)]
            self.assertEqual(expect, len(infer), "inferPossiblePrefixesOf: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))



    def test_IM_domainsfeatures(self):
        """Check the domains and the feature count inferred from the task model."""
        expect = set(['Slice', 'Substrate'])
        infer = set([ self.MS_planetlab.ontology._tail(x) for x in self.MS_planetlab.taskmodel.inferDomains() ])
        self.assertEqual(expect, infer, "inferDomains: expect %d, got %d\n%s" % (len(expect), len(infer), str(infer)))

        expect = len(self.feat_task) #19 features exist, but some of them have no name
        infer = [ x for x in self.MS_planetlab.taskmodel.inferFeatures()]
        self.assertEqual(expect, len(infer), "inferFeatures: expect %d, got %d\n%s" % (expect, len(infer), str(infer)))

    def test_IM_task(self):
        """Compare task metadata inferred from the ontology with the hand-written Example definitions."""
        for feat, (t_subst, t_slice) in self.feat_task.iteritems():
            feature = self.MS_planetlab.ontology.ns('feature')[feat]
            infer_t_subst = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.substrate, feature)]
            infer_t_slice = [ name for _, name in self.MS_planetlab.taskmodel.inferTasks(self.slice, feature)]
            self.assertEqual(infer_t_subst, t_subst, "feature: %s searchtask (substrate): expect %s, got %s" % (feat, t_subst, infer_t_subst))
            self.assertEqual(infer_t_slice, t_slice, "feature: %s searchtask (slice): expect %s, got %s" % (feat, t_slice, infer_t_slice))

        # Use the SONoMA ping task as the reference tool description.
        task = self.MS_planetlab.ontology.ns('conf')['T_SONoMAPing']
        infer = self.MS_planetlab.taskmodel.inferCredentialOf(task)
        expect = set(sonomashortping.authtype)
        self.assertEqual(infer, expect, "credentials differ expect: %s got: %s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferDriverOf(task)
        expect = sonomashortping.driver
        self.assertEqual(infer, expect, "drivers differ expect: %s got: %s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferHookparametersOf(task)
        expect = sonomashortping.kwargs
        self.assertEqual(infer, expect, "hook parameters differ expect: %s got: %s" % (expect, infer))

        H = self.MS_planetlab.taskmodel.inferHookdefinitionsOf(task)
        for k, h in H.iteritems():
            exp = sonomashortping.hooks[k].strip()
            h = h.strip()
            self.assertEqual(h, exp, "%s hook differs\nexpect:\n%s\ngot:\n%s" % (k, exp, h))

        #TODO: check feature equality
        infer = [ (c.name, str(c._unit), str(c._dimension)) for c in self.MS_planetlab.taskmodel.inferDataheaderOf(task) ]
        expect = [ (c.name, str(c._unit), str(c._dimension)) for c in sonomashortping.dataheaderdeclaration ]
        self.assertEqual(infer, expect, "output header declarations differ expect:\n%s\ngot:\n%s" % (expect, infer))

        infer = self.MS_planetlab.taskmodel.inferParametersOf(task)
        expect = sonomashortping.parameters
        n_inf, n_exp = set(infer.parameter_names()), set(expect.parameter_names())
        self.assertEqual(n_inf, n_exp, "runtime parameters differ expect: %s got: %s" %(n_exp, n_inf))
        for k, p in expect.parameter.iteritems():
            inf_v = infer.parameter[k].value
            exp_v = p.value
            if exp_v is None:
                self.assertFalse(inf_v, "Expected uninitialized value, got %s" % inf_v)
            else:
                # Compare (value, unit) with the unit stringified, since unit
                # objects coming from different managers may not be identical.
                inf_v = (inf_v[0], str(inf_v[1]))
                exp_v = (exp_v[0], str(exp_v[1]))
                self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))

        feature = self.MS_planetlab.ontology.ns('feature')['RoundtripDelay']
        expect = RoundTripDelay.p_obligatory
        infer = self.MS_planetlab.taskmodel.inferObligatoryParametersOf(feature)
        self.assertEqual(len(expect), len(infer), "obligatory parameters for %s differ expect: %s got: %s" % (feature, expect.parameter_names(), infer.parameter_names()))
        for k, p in expect.parameter.iteritems():
            inf_v = infer.parameter[k].value
            exp_v = p.value
            inf_v = (inf_v[0], str(inf_v[1]))
            exp_v = (exp_v[0], str(exp_v[1]))
            self.assertEqual(inf_v, exp_v, "Parameter value differ %s expect:\n%s\ngot:\n%s" % (k, exp_v, inf_v))

    def test_taskBYuri(self):
        """Launch each configured task by its URI and expect a non-empty result within the wait budget."""
        cases = {
            'T_SSHPingSlice': [self.p_src_eltenode, self.slicename, self.count, self.p_dst_ntuanode],
            'T_SSHMemInfo': [self.p_src_eltenode],
            'T_SSHMemInfoSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHCPULoad': [self.p_src_eltenode],
            'T_SSHCPULoadSlice': [self.p_src_eltenode, self.slicename],
            'T_SSHUptime': [self.p_src_eltenode],
            'T_SSHUptimeSlice': [self.p_src_eltenode, self.slicename],
#            'T_SONoMAPing': [self.p_src_eltenode, self.p_dst_ntuanode],
#            'T_hadesaggregate': [self.p_src_fednode, self.p_dst_fednode],
            }
        for l,p in cases.iteritems():
            task_uri = self.MS_planetlab.ontology.ns('conf')[l]
            _, task = self.MS_planetlab.newTask(task = task_uri,
                                                cred = ple_credentials,
                                                resource = None,
                                                parameters = ParameterList(p))
            task.enable()
            # Wait at most 15 s for the first data record to arrive.
            task.dataAdded.wait( 15 )
            self.assertGreater(len(task.data), 0, "measurement %s yielded empty result" % l)
#            print task.data._rawrecords


    def test_owlexamples(self):
        """Parse the example query document from its URL, run the inferred tasks and validate the aggregators."""
        doc = "%s/monitoringQuery_example.owl" % self.MS_planetlab.ontology.baseurl
        g = Graph()
        g.parse(source = doc)
        qdict = self.MS_planetlab.QI.getBundleQuery(qgraph = g)
        self.assertTrue(len(qdict), "Got empty query")
        for q in qdict:
            domain = self.MS_planetlab.ontology.ns('task')['Substrate']
            taskgen = self.MS_planetlab.taskmodel.inferTasks(domain, q.feature)
            #we are ugly here: use the first tool
            for task_uri, _ in taskgen: break
            _, task = self.MS_planetlab.newTask(task = task_uri,
                                                cred = ple_credentials,
                                                resource = q.resource,
                                                parameters = q.paramlist)
            del task.strategy # make sure STRAT_ONDEMAND
            task.enable()
            task.dataAdded.wait( 15 )
            task.dataAdded.clear()
            if q.samplechain:
                flow = []
                for skeleton, parlist in q.samplechain:
                    flow.append((skeleton, parlist.formkeyvaldict()))
                aid = self.MS_planetlab.am.newAggregator(task.data, CellRequestByName(name = 'Free Memory'), flow)
                A = self.MS_planetlab.am[ aid ]
                # Keep measuring until the aggregator has enough samples;
                # SamplerError signals that more data is still needed.
                while True:
                    try:
                        s, a = A.data._rawrecords[0]
                        self.assertEqual(s, len(task.data), "inconsistency in length len(data)=%d, max of %d samples?" % (len(task.data), s))
                        R = DataReader(datasource = task.data)
                        R.extract(cellrequest = [CellRequestByName(name = 'Free Memory')])
                        expect = max( [ float(x) for x, in R ] )
                        self.assertEqual(expect, a, "inconsistency in aggregare %f <> %f" % (expect, a))
                        break
                    except SamplerError:
                        print "MEASURE SOME MORE ..."
                        task.disable()
                        task.enable()
                        task.dataAdded.wait( 15 )
                        task.dataAdded.clear()
            self.assertGreater(len(task.data), 0, "measurement yielded empty result")
+
+
+
if __name__ == "__main__":
    # Uncomment to narrow the run to a single test case:
    #import sys;sys.argv = ['', 'Test.test_IM_domainsfeatures']
    unittest.main()
diff --git a/Monitoring/src/main/python/Service/MonSrvImpl.py b/Monitoring/src/main/python/Service/MonSrvImpl.py
new file mode 100644
index 0000000..36da1cc
--- /dev/null
+++ b/Monitoring/src/main/python/Service/MonSrvImpl.py
@@ -0,0 +1,326 @@
+'''
+Created on 08.08.2011
+
+@author: Sandor Laki
+'''
+
+from __future__ import with_statement
+from rdflib import Graph
+from StringIO import StringIO
+from Service.interface import MSInterface
+from eu.novi.monitoring import MonDiscoveryImpl
+import sys
+from Util.MonitoringQueryImpl import MonitoringQueryImpl
+from threading import Lock
+from org.slf4j import Logger
+from org.slf4j import LoggerFactory
+from os import path, access, R_OK
+#import eu.novi.feedback.event.ReportEvent
+import traceback
+import java.lang.StackOverflowError
+import java.lang.Error
+
+try:
+ import site
+ site.addsitedir('../site-packages')
+except ImportError, e:
+ sys.stderr.write("[EXCEPTION] import Site -> %s\n" % e)
+
+
+
+try:
+ from eu.novi.monitoring import MonSrv
+except ImportError:
+ MonSrv = object
+
+try:
+ from eu.novi.monitoring import Wiring
+except ImportError:
+ Wiring = object
+
+try:
+ from eu.novi.im.core import Resource
+except ImportError, e:
+ sys.stderr.write("[EXCEPTION] Resource -> %s\n" % e)
+ Resource = None
+
+
class MonSrvImpl(MonSrv,Wiring):
    """Jython implementation of the MonSrv OSGi service.

    Bridges the Java-facing MonSrv/Wiring interfaces to the Python
    monitoring service behind MSInterface: translates Java credential
    objects into the dictionary form the Python layer expects and
    forwards every monitoring call to the lazily created MSInterface.

    NOTE(review): testbed, userFeedback and lock are class attributes,
    so they are shared by all instances until shadowed via the setters.
    """
    testbed = "Undefined"
    userFeedback = None
    # Guards the lazy construction of the MSInterface singleton in getMSI().
    lock = Lock()

    log = LoggerFactory.getLogger("eu.novi.monitoring.MonSrv")

    def __init__(self):
        #self.testbed = "Unknown"
        # The MSInterface is created lazily in getMSI(), once the testbed
        # name has been injected through setTestbed().
        self._msi = None
        self.framework = MonDiscoveryImpl()
        self.log.info("MonSrvImpl has started... Testbed=%s" % self.testbed)

    def createQuery(self):
        """Return a new MonitoringQuery bound to the underlying monitoring service."""
        return MonitoringQueryImpl(self.getMSI()._ms)

    def getMSI(self):
        """Return the MSInterface, creating it on first use (thread-safe).

        Tries the testbed-specific configuration model first and falls
        back to the PlanetLab configuration when loading it fails.
        """
        print "getMSI %s" % self.getTestbed()
        self.log.info("getMSI %s" % self.getTestbed())
        tbname = self.getTestbed()
        with self.lock:
            if self._msi is None:
                baseurl = ""
                config_owl = "config_%s.owl" % (tbname.lower())
                self.log.info("Testbed specific configuration: %s" % config_owl)
                #config_owl = "config_planetlab.owl"
                try:
                    self._msi = MSInterface(self.framework, self.getTestbed(), baseurl, config_owl)
                except:
                    # NOTE(review): bare except; any failure (including typos
                    # in the config name) silently falls back to PlanetLab.
                    self.log.info("Error occured at %s" % config_owl)
                    config_owl = "config_planetlab.owl"
                    self._msi = MSInterface(self.framework, self.getTestbed(), baseurl, config_owl)
                self.log.info("MSInterface has been instanciated... Testbed=%s" % self.getTestbed() )
        return self._msi


    # Plain bean-style accessors used by the OSGi wiring.
    def setPolicy(self, policy):
        self.policy = policy

    def getPolicy(self):
        return self.policy

    def getPlatform(self):
        return self.testbed

    def setResource(self, resource):
        self.resource = resource

    def getResource(self):
        return self.resource

    def getTestbed(self):
        return self.testbed

    def setTestbed(self, testbed):
        self.testbed = testbed

    def getUserFeedback(self):
        return self.userFeedback

    def setUserFeedback(self, userFeedback):
        self.userFeedback = userFeedback

    # Test purpose function
    def echo(self, platform):
        '''
        @summary: An integration tester function (to be exported public)
        @param platform: name of the platform
        @type platform: string
        @return: messages of the platforms taking part in the message flow
        @rtype: string
        '''
        return self.getMSI().echo(platform)

    def extractCredential(self, credential):
        """Convert a Java credential object into the list-of-dict form used by the Python layer.

        Returns an error string (not an exception) for unknown credential
        types; callers pass the result on unchecked.
        """
        cred = []
        if credential.getType()=="UsernamePassword": cred=[{'username' : credential.username, 'password' : credential.password}]
        elif credential.getType()=="UsernameRSAKey": cred=[{'username' : credential.username, 'password' : credential.password, 'rsakey' : credential.RSAKey}]
        else: return "Error - unknown credential...."

        # Hardcoded credential - TODO: FIX IT ASAP!!!
        # NOTE(review): security issue - the supplied credential is silently
        # replaced by this root key whenever the file is readable.
        PATH="/home/novi/apache-servicemix-4.4.1-fuse-01-06/instances/system-tests/etc/root_planetlab_rsa"

        try:
            #PATH="/home/novi/apache-servicemix-4.4.1-fuse-01-06/instances/system-tests/etc/sfademo_key"
            if path.exists(PATH) and path.isfile(PATH) and access(PATH, R_OK):
                cred=[{'username' : "root", 'password' : "", 'rsakey' : PATH}]
                self.log.info("root path exists and readable")
        except:
            self.log.info("root key cannot be accessed at %s" % PATH)
            if not path.exists(PATH):
                self.log.info("path doesn't exists")
            if not path.isfile(PATH):
                self.log.info("path is not a file")
            if not access(PATH, R_OK):
                self.log.info("file cannot be accessed, permission issue?")
            #pass
        cred.append({'username':'monitor1','password':'m/n.t,r1'}) # G3 Access
        return cred


    # Substrate monitoring function
    def measure(self, credential, query):
        '''
        @summary: Method to handle substrate monitoring queries (to be exported public)
        @param credential:
        @type credential:
        @param query: an owl document containing several BundleQuery instances
        @type query: string
        @return: response to the query
        @rtype: string
        '''
        cred = self.extractCredential( credential )
        self.log.info("New substrate monitoring query has arrived: %s" % query)
        # NOTE(review): on any failure this logs and returns the empty JSON
        # list "[]"; the separate java.lang handlers presumably cover Java
        # Errors that are not Python Exceptions under Jython - confirm.
        try:
            print "Call measure"
            #TODO: split query and concatenate results
            return self.getMSI().measure(cred, query)
        except Exception, e:
            self.log.info("Exception %s %s" % (e, traceback.format_exc()))
        except java.lang.StackOverflowError, se:
            se.printStackTrace()
            self.log.info("unknown %s" % se.toString())
        except java.lang.Error, er:
            er.printStackTrace()
        return "[]"

    def substrateFB(self, credential, query, sessionID):
        """Run a substrate query and best-effort report it to the user feedback service."""
        try:
            self.getUserFeedback().instantInfo(sessionID, "MS", "A substrate monitoring task has been submitted.", "http://fp7-novi.eu");
        except:
            # Feedback is optional; a failure here must not block the measurement.
            self.log.info("Feedback thrown an exception")
        return self.measure(credential, query)

    def substrate(self, credential, query):
        """Alias of measure() kept for the public interface."""
        return self.measure(credential, query)

    # Slice monitoring functions
    def sliceTasks(self, credential, query):
        return "sliceTasks() method is not implemented"

    def addTask(self, credential, query):
        '''
        @summary: Method to start slice monitoring tasks (to be exported public)
        @param credential:
        @type credential:
        @param query: an owl document containing several BundleQuery instances
        @type query: string
        @return: process identifier
        @rtype: string
        '''
        #TODO: investigate if the service instance under this interface should be the boss
        cred = self.extractCredential( credential )
        return self.getMSI().launchTasks(cred, query)

    def describeTaskData(self, credential, query):
        '''
        @summary: Method to retrieve meta data of task data (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        @return: serialize the header of the data tables
        @rtype: string
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().describeTaskData(cred, query)


    def fetchTaskData(self, credential, query):
        '''
        @summary: Method to retrieve task data collected since last fetch or the start (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        @return: serialize the appended content of the data tables
        @rtype: string
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().fetchTaskData(cred, query)

    def modifyTask(self, credential, query):
        # NOTE(review): InterfaceError is neither defined nor imported in this
        # module, so calling this actually raises NameError - confirm intent.
        raise InterfaceError("modifyTask() method is not implemented")

    def removeTask(self, credential, query):
        '''
        @summary: Method to remove a slice measurement task (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().removeTask(cred, query)

    def enableTask(self, credential, query):
        '''
        @summary: Method to enable a slice measurement task (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().enableTask(cred, query)

    def disableTask(self, credential, query):
        '''
        @summary: Method to disable a slice measurement task temporarily (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().disableTask(cred, query)

    def getTaskStatus(self, credential, query):
        '''
        @summary: Method to check the state of a slice measurement task (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        @return: True if the tasks are running
        @rtype: boolean
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().getTaskStatus(cred, query)

    def addAggregator(self, credential, query):
        '''
        @summary: Method to define new data manipulation on slice monitoring data (to be exported public)
        @param credential:
        @type credential:
        @param query: an owl document containing several SampleManipulationQuery instances
        @type query: string
        @return: aggregator identifier
        @rtype: string
        '''
        #TODO: investigate if the service instance under this interface should be the boss
        cred = self.extractCredential( credential )
        return self.getMSI().addAggregator(cred, query)

    def removeAggregator(self, credential, query):
        '''
        @summary: Method to remove data manipulation on slice monitoring data (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().removeAggregator(cred, query)

    def fetchAggregatorData(self, credential, query):
        '''
        @summary: Method to refresh and serialize results of data manipulation on slice monitoring data (to be exported public)
        @param credential:
        @type credential:
        @param query:
        @type query: string
        @return: result of aggregators
        @rtype: string
        '''
        cred = self.extractCredential( credential )
        return self.getMSI().fetchAggregatorData(cred, query)

    def addCondition(self, credential, query):
        # NOTE(review): InterfaceError is undefined here as well (NameError at call time).
        raise InterfaceError("addCondition() method is not implemented")

    def modifyCondition(self, credential, query):
        # NOTE(review): InterfaceError is undefined here as well (NameError at call time).
        raise InterfaceError("modifyCondition() method is not implemented")

    def removeCondition(self, credential, query):
        # NOTE(review): InterfaceError is undefined here as well (NameError at call time).
        raise InterfaceError("removeCondition() method is not implemented")
+
+
diff --git a/Monitoring/src/main/python/Service/MonitoringService$py.class b/Monitoring/src/main/python/Service/MonitoringService$py.class
new file mode 100644
index 0000000..1d60f70
--- /dev/null
+++ b/Monitoring/src/main/python/Service/MonitoringService$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Service/MonitoringService.py b/Monitoring/src/main/python/Service/MonitoringService.py
new file mode 100644
index 0000000..f6102dc
--- /dev/null
+++ b/Monitoring/src/main/python/Service/MonitoringService.py
@@ -0,0 +1,382 @@
+from __future__ import with_statement
+'''
+Created on Mar 22, 2012
+
+@author: steger
+'''
+from time import sleep
+from DataProcessing.Prefix import PrefixManager
+from DataProcessing.Unit import UnitManager
+from DataProcessing.Dimension import DimensionManager
+from Semantics.InformationModel import Ontology
+from Semantics.UnitModel import UnitModel
+from Semantics.TaskModel import TaskModel
+from Semantics.QueryInterpreter import QueryInterpreter
+from Task.Task import SubtaskManager, TaskError, STRAT_PERIODICAL,\
+ STRAT_ONDEMAND
+from DataProcessing.Parameter import ParameterList
+from Resource.node import node
+from Resource.link import link
+from DataProcessing.AggregatorManager import AggregatorManager
+from DataProcessing.MeasurementLevel import lut_level
+#from paramiko.ssh_exception import BadAuthenticationType
+import logging
+from rdflib import Graph
+from StringIO import StringIO
+from DataProcessing.DataFormatter import JsonFormatter
+from DataProcessing.DataHeaderCell import CellRequestByFeature,\
+ CellRequestByName
+from DataProcessing.DataError import SamplerError
+import traceback
+
+class MonitoringService(object):
+ '''
+ classdocs
+ '''
+ version = "0.0"
+
+ def __str__(self):
+ return "NOVI Monitoring Service v%s @ %s" % (self.version, self.platform)
+
+ def _get_platform(self):
+ return self._if.platform
+
+ def __init__(self, interface, baseurl, config_owl):
+ '''
+ @summary: constructor
+ @param interface:
+ @type interface: MSInterface
+        @param baseurl: the location of the ontology files. Either point to the file system or to a public url
+ @type baseurl: str
+ @param config_owl: platform specific configuration model
+ @type config_owl: str
+ '''
+ self._if = interface
+ self.logger = logging.getLogger(name = "NOVI.MS.%s" % self.platform)
+ self.log = self._if.log # to be removed
+ self.pm = PrefixManager()
+ self.um = UnitManager()
+ self.dm = DimensionManager(self.um)
+ self.stm = SubtaskManager(self.um)
+ self.am = AggregatorManager()
+ self.domains = []
+ self.features = []
+ self.ontology = Ontology(baseurl = baseurl, config_owl = config_owl)
+ self.unitmodel = UnitModel(self.ontology)
+ self.taskmodel = TaskModel(self.dm, self.um, self.ontology)
+ um = self.unitmodel
+
+ # infer and store prefixes
+ for (p_reference, p_symbol, base, exponent) in um.inferPrefixes():
+ self.pm.newPrefix( self.ontology._tail(p_reference), p_symbol, base, exponent )
+
+ # infer basic units
+ for u_reference, u_symbol in um.inferBaseUnits():
+ self.storeBasicUnit(u_reference, u_symbol)
+ for u_reference, u_symbol, _, _ in um.inferPowerUnits():
+ self.storeBasicUnit(u_reference, u_symbol)
+ for u_reference, u_symbol, _ in um.inferProductUnits():
+ self.storeBasicUnit(u_reference, u_symbol)
+ for u_reference, u_symbol, derivedfrom, scale, offset in um.inferLinearTransformedUnits():
+ self.storeLinearTransformedUnit(u_reference, u_symbol, derivedfrom, scale, offset)
+ for u_reference, u_symbol, derivedfrom, expr_fwd, expr_inv in um.inferRegexpTransformedUnits():
+ uref = self.ontology._tail(u_reference)
+ ancestor = self.um[ self.ontology._tail(derivedfrom) ]
+ self.um.addRegexpTransformedUnit(uref, u_symbol, ancestor, expr_fwd, expr_inv)
+
+ # infer dimensions
+ #FIXME: if there is a reference loop an error is raised...
+ for d_reference, u_reference, level in um.inferBaseDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ lref = self.ontology._tail(level)
+ level = lut_level[lref]
+ unit = self.um[uref]
+ self.dm.newBaseDimension(dref, dref, unit, level)
+ for d_reference, u_reference, d_derivedfrom in um.inferDifferenceDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ daref = self.ontology._tail(d_derivedfrom)
+ unit = self.um[uref]
+ derivedfrom = self.dm[daref]
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.DifferenceDimension)
+ for d_reference, u_reference, d_derivedfrom, exponent in um.inferPowerDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ daref = self.ontology._tail(d_derivedfrom)
+ unit = self.um[uref]
+ derivedfrom = self.dm[daref]
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.PowerDimension, exponent = exponent)
+ for d_reference, u_reference, d_derivedfrom in um.inferProductDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ unit = self.um[uref]
+ derivedfrom = tuple( self.dm[self.ontology._tail(x)] for x in d_derivedfrom )
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.ProductDimension)
+ for d_reference, u_reference, d_derivedfrom in um.inferRatioDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ daref = self.ontology._tail(d_derivedfrom)
+ unit = self.um[uref]
+ derivedfrom = self.dm[daref]
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.RatioDimension)
+
+ # infer domains and features
+ for uri_domain in self.taskmodel.inferDomains():
+ self.domains.append(uri_domain)
+ for uri_feature, _, _ in self.taskmodel.inferFeatures():
+ self.features.append(uri_feature)
+
+ self.QI = QueryInterpreter(self.taskmodel)
+
+ self._nextID = 0
+ self.subtaskIDs = {}
+ self.aggregatorIDs = {}
+ self.formatters = {}
+
+
+ def storeBasicUnit(self, u_reference, u_symbol):
+ uref = self.ontology._tail(u_reference)
+ bu = self.um.newBasicUnit(uref, u_symbol)
+ for p_reference in self.unitmodel.inferPossiblePrefixesOf(u_reference):
+ p = self.pm[ self.ontology._tail(p_reference) ]
+ puref = "%s_%s" % (p.reference, uref)
+ symbol = "%s%s" % (p.symbol, bu.symbol)
+ self.um.addLinearTransformedUnit(puref, symbol, bu, p.scale)
+
+ def storeLinearTransformedUnit(self, u_reference, u_symbol, derivedfrom, scale, offset):
+ uref = self.ontology._tail(u_reference)
+ ancestor = self.um[ self.ontology._tail(derivedfrom) ]
+ bu = self.um.addLinearTransformedUnit(uref, u_symbol, ancestor, scale, offset)
+ for p_reference in self.unitmodel.inferPossiblePrefixesOf(u_reference):
+ p = self.pm[ self.ontology._tail(p_reference) ]
+ puref = "%s_%s" % (p.reference, uref)
+ symbol = "%s%s" % (p.symbol, bu.symbol)
+ self.um.addLinearTransformedUnit(puref, symbol, bu, p.scale)
+
+ def newProcessID(self):
+ try:
+ return "%s:process:%d" % (self.platform, self._nextID)
+ finally:
+ self._nextID += 1
+
+ def newAggregateID(self, isprocess = True):
+ try:
+ return "%s:aggregate:%d" % (self.platform, self._nextID)
+ finally:
+ self._nextID += 1
+
+ def measure(self, credential, query):
+ #TODO: docs
+ '''
+ '''
+ g = Graph()
+ g += self.ontology.graph
+ sio = StringIO(query)
+ g.parse(source = sio)
+ responses = []
+ errors = []
+ queries = self.QI.inferBundleQueries(qgraph = g)
+ self.log(shortmsg = "measurements starting...", message = "Attempt to launch %d measurement threads" % len(queries))
+ for q in queries:
+ feature_uri = q.feature
+ domain = self.ontology.ns('task')['Substrate']
+ taskgen = self.taskmodel.inferTasks(domain, feature_uri)
+ no_tool = True
+ (resource_uri, resource) = q.resource
+ #we are ugly here: use the first tool
+ for task_uri, _ in taskgen:
+ try:
+ no_tool = False
+ _, task = self.newTask(task = task_uri, cred = credential, resource = resource, parameters = q.paramlist)
+ if task is None:
+ continue
+ if q.samplechain:
+ task.strategy = STRAT_PERIODICAL
+ # we apply some aggregation to the data
+ flow = []
+ for skeleton, parlist in q.samplechain:
+ flow.append((skeleton, parlist.formkeyvaldict()))
+ aid = self.am.newAggregator(task.data, CellRequestByFeature(feature = q.feature), flow)
+ A = self.am[aid]
+ task.enable()
+ while True:
+ try:
+ task.dataAdded.wait( 15 )
+ formatter = JsonFormatter(datasource = A.data)
+ break
+ except SamplerError:
+ task.dataAdded.clear()
+ sleep(1)
+ else:
+ task.strategy = STRAT_ONDEMAND
+ task.enable()
+ task.dataAdded.wait( 15 )
+ formatter = JsonFormatter(datasource = task.data)
+ formatter.extract(cellrequest = [
+ CellRequestByName(name = "Run"),
+ CellRequestByFeature(feature = feature_uri)
+ ])
+ t = formatter.serialize()
+ try:
+ print "Call task.destroy"
+ task.destroy()
+ except:
+ pass
+ #print "retek",t
+ if t is not None:
+ if len(t)>0:
+ responses.append( "{\"%s\" : %s}" %(feature_uri,t) ) #formatter.serialize() )
+ except Exception, e:
+ tbe = traceback.format_exc()
+ err_desc = "Unexpected exception occured: %s, %s" % (e, tbe)
+ errors.append(err_desc)
+ if no_tool:
+ err_description = "No tools to measure %s @ %s" % (feature_uri, resource_uri)
+ errors.append(err_description)
+ self.log(shortmsg = "Limited result set", message = err_description)
+ useful_data = ",\n".join( responses )
+ error_data = "+".join(errors)
+ if len(errors):
+ if len(useful_data):
+ response = "[%s,\n{\"errors\" : \"%s\"}]" % (useful_data, error_data)
+ else:
+ response = "[{\"errors\" : \"%s\"}]" % (error_data)
+ else:
+ response = "[%s]" % useful_data
+ return response
+
+ def launchTasks(self, credential, query):
+ #TODO: many things in common with measure!!!
+ g = Graph()
+ g += self.ontology.graph
+ sio = StringIO(query)
+ g.parse(source = sio)
+ taskID = self.newID()
+ idstore = self.subtaskIDs[taskID] = []
+ formatters = self.formatters[taskID] = []
+ for q in self.QI.getBundleQuery(qgraph = g):
+ feature_uri = q.feature
+
+ print "PPPPP", q.paramlist
+
+ domain = self.ontology.ns('task')['Slice']
+ taskgen = self.taskmodel.inferTasks(domain, feature_uri)
+ #we are ugly here: use the first tool
+ for task_uri, _ in taskgen:
+ subtaskID, task = self.newTask(task = task_uri, cred = credential, resource = q.resource, parameters = q.paramlist)
+ task.strategy = STRAT_PERIODICAL
+ task.enable()
+ idstore.append(subtaskID)
+ f = q.formatter(datasource = task.data)
+ formatters.append(f)
+ if len(idstore):
+ return taskID
+ else:
+ self.subtaskIDs.pop(taskID)
+ self.formatters.pop(taskID)
+ return None
+
+
+ platform = property(_get_platform,None,None)
+
+
+ def newTask(self, task, cred, resource = None, parameters = ParameterList()):
+ '''
+ @summary: initialize a Task object, which is referenced by a uri
+ @param task: the reference to the task description
+ @type task: URIRef
+ @param cred: an iterable over dictionaries, which are used as input parameters to initialize Credential templates passed to the Task object for authentication, authorization purposes
+ @type cred: dict generator
+ @param resource: the resource to measure
+ @type resource: resource or None
+ @param parameters: the parameter list to refresh the default parameters of the Task object
+ @type parameters: ParameterList
+ @return: the tuple of taskID and the initialized measurement Task object
+ @rtype: int, Task
+ '''
+ name = self.ontology._tail(task)
+ credset = self.taskmodel.inferCredentialOf(task)
+ driver = self.taskmodel.inferDriverOf(task)
+ hdr = self.taskmodel.inferDataheaderOf(task)
+ hooks = self.taskmodel.inferHookdefinitionsOf(task)
+ hookpar = self.taskmodel.inferHookparametersOf(task)
+ taskparameters = self.taskmodel.inferParametersOf(task)
+
+ taskparameters.update_by_list(parameters)
+
+ #TODO: maybe better push resource to the Task as an argument
+ if isinstance(resource, node):
+ addr, unit = resource.get_ipaddress("eth0")
+ taskparameters.update("SourceAddress", addr, unit)
+ elif isinstance(resource, link):
+ addr, unit = resource.source.address
+ taskparameters.update("SourceAddress", addr, unit)
+ addr, unit = resource.destination.address
+ taskparameters.update("DestinationAddress", addr, unit)
+
+# print taskparameters
+
+ while len(credset):
+ ct = credset.pop()
+ for c in cred:
+ try:
+ credential = ct(**c)
+ except:
+ # credential mismatch go on with the next
+ continue
+ try:
+ return self.stm.generate(name = name, driver = driver, dataheader = hdr,
+ hookimplementations = hooks, parameters = taskparameters, credential = credential, **hookpar)
+ except Exception, e:
+ print "Exception - %s" % e
+ pass
+ return None, None
+ #raise TaskError("Cannot initialize the Task with the credential set provided for %s" % name)
+
+ def delTask(self, taskidentifier):
+ self.stm.pop( taskidentifier )
+
+ def getTask(self, taskidentifier):
+ return self.stm[ taskidentifier ]
+
+ def attachAggregators(self, credential, query):
+ g = Graph()
+ g += self.ontology.graph
+ sio = StringIO(query)
+ g.parse(source = sio)
+ aggregatorID = self.newID()
+ idstore = self.aggregatorIDs[aggregatorID] = []
+ formatters = self.formatters[aggregatorID] = []
+ raise Exception("unimplemented")
+# for q in self.QI.getBundleQuery(qgraph = g):
+# feature_uri = q.feature
+#
+# print "PPPPP", q.paramlist
+#
+# domain = self.ontology.ns('task')['Slice']
+# taskgen = self.taskmodel.inferTasks(domain, feature_uri)
+# #we are ugly here: use the first tool
+# for task_uri, _ in taskgen:
+# subtaskID, task = self.newTask(task = task_uri, cred = credential, resource = q.resource, parameters = q.paramlist)
+# task.strategy = STRAT_PERIODICAL
+# task.enable()
+# idstore.append(subtaskID)
+# f = q.formatter(datasource = task.data)
+# formatters.append(f)
+ if len(idstore):
+ return aggregatorID
+ else:
+ self.subtaskIDs.pop(aggregatorID)
+ self.formatters.pop(aggregatorID)
+ return None
+
+ def newAggregator(self):
+ pass
+
+ def delAggregator(self, aggregatoridentifier):
+ self.am.pop( aggregatoridentifier )
+
+ def getAggregator(self, aggregatoridentifier):
+ return self.am[ aggregatoridentifier ]
+
diff --git a/Monitoring/src/main/python/Service/MonitoringService.py.old b/Monitoring/src/main/python/Service/MonitoringService.py.old
new file mode 100644
index 0000000..0939c73
--- /dev/null
+++ b/Monitoring/src/main/python/Service/MonitoringService.py.old
@@ -0,0 +1,354 @@
+'''
+Created on Mar 22, 2012
+
+@author: steger
+'''
+from time import sleep
+from DataProcessing.Prefix import PrefixManager
+from DataProcessing.Unit import UnitManager
+from DataProcessing.Dimension import DimensionManager
+from Semantics.InformationModel import Ontology
+from Semantics.UnitModel import UnitModel
+from Semantics.TaskModel import TaskModel
+from Semantics.QueryInterpreter import QueryInterpreter
+from Task.Task import SubtaskManager, TaskError, STRAT_PERIODICAL,\
+ STRAT_ONDEMAND
+from DataProcessing.Parameter import ParameterList
+from Resource.node import node
+from DataProcessing.AggregatorManager import AggregatorManager
+from DataProcessing.MeasurementLevel import lut_level
+from paramiko.ssh_exception import BadAuthenticationType
+import logging
+from rdflib import Graph
+from StringIO import StringIO
+from DataProcessing.DataFormatter import JsonFormatter
+from DataProcessing.DataHeaderCell import CellRequestByFeature,\
+ CellRequestByName
+from DataProcessing.DataError import SamplerError
+
+class MonitoringService(object):
+ '''
+ classdocs
+ '''
+ version = "0.0"
+
+ def __str__(self):
+ return "NOVI Monitoring Service v%s @ %s" % (self.version, self.platform)
+
+ @property
+ def platform(self):
+ return self._if.platform
+
+ def __init__(self, interface, baseurl, config_owl):
+ '''
+ @summary: constructor
+ @param interface:
+ @type interface: MSInterface
+        @param baseurl: the location of the ontology files. Either point to the file system or to a public url
+ @type baseurl: str
+ @param config_owl: platform specific configuration model
+ @type config_owl: str
+ '''
+ self._if = interface
+ self.logger = logging.getLogger(name = "NOVI.MS.%s" % self.platform)
+ self.log = self._if.log # to be removed
+ self.pm = PrefixManager()
+ self.um = UnitManager()
+ self.dm = DimensionManager(self.um)
+ self.stm = SubtaskManager(self.um)
+ self.am = AggregatorManager()
+ self.domains = []
+ self.features = []
+ self.ontology = Ontology(baseurl = baseurl, config_owl = config_owl)
+ self.unitmodel = UnitModel(self.ontology)
+ self.taskmodel = TaskModel(self.dm, self.um, self.ontology)
+ um = self.unitmodel
+
+ # infer and store prefixes
+ for (p_reference, p_symbol, base, exponent) in um.inferPrefixes():
+ self.pm.newPrefix( self.ontology._tail(p_reference), p_symbol, base, exponent )
+
+ # infer basic units
+ for u_reference, u_symbol in um.inferBaseUnits():
+ self.storeBasicUnit(u_reference, u_symbol)
+ for u_reference, u_symbol, _, _ in um.inferPowerUnits():
+ self.storeBasicUnit(u_reference, u_symbol)
+ for u_reference, u_symbol, _ in um.inferProductUnits():
+ self.storeBasicUnit(u_reference, u_symbol)
+ for u_reference, u_symbol, derivedfrom, scale, offset in um.inferLinearTransformedUnits():
+ self.storeLinearTransformedUnit(u_reference, u_symbol, derivedfrom, scale, offset)
+ for u_reference, u_symbol, derivedfrom, expr_fwd, expr_inv in um.inferRegexpTransformedUnits():
+ uref = self.ontology._tail(u_reference)
+ ancestor = self.um[ self.ontology._tail(derivedfrom) ]
+ self.um.addRegexpTransformedUnit(uref, u_symbol, ancestor, expr_fwd, expr_inv)
+
+ # infer dimensions
+ #FIXME: if there is a reference loop an error is raised...
+ for d_reference, u_reference, level in um.inferBaseDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ lref = self.ontology._tail(level)
+ level = lut_level[lref]
+ unit = self.um[uref]
+ self.dm.newBaseDimension(dref, dref, unit, level)
+ for d_reference, u_reference, d_derivedfrom in um.inferDifferenceDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ daref = self.ontology._tail(d_derivedfrom)
+ unit = self.um[uref]
+ derivedfrom = self.dm[daref]
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.DifferenceDimension)
+ for d_reference, u_reference, d_derivedfrom, exponent in um.inferPowerDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ daref = self.ontology._tail(d_derivedfrom)
+ unit = self.um[uref]
+ derivedfrom = self.dm[daref]
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.PowerDimension, exponent = exponent)
+ for d_reference, u_reference, d_derivedfrom in um.inferProductDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ unit = self.um[uref]
+ derivedfrom = tuple( self.dm[self.ontology._tail(x)] for x in d_derivedfrom )
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.ProductDimension)
+ for d_reference, u_reference, d_derivedfrom in um.inferRatioDimensions():
+ dref = self.ontology._tail(d_reference)
+ uref = self.ontology._tail(u_reference)
+ daref = self.ontology._tail(d_derivedfrom)
+ unit = self.um[uref]
+ derivedfrom = self.dm[daref]
+ self.dm.newDerivedDimension(dref, dref, unit, derivedfrom, self.dm.RatioDimension)
+
+ # infer domains and features
+ for uri_domain in self.taskmodel.inferDomains():
+ self.domains.append(uri_domain)
+ for uri_feature, _, _ in self.taskmodel.inferFeatures():
+ self.features.append(uri_feature)
+
+ self.QI = QueryInterpreter(self.taskmodel)
+
+ self._nextID = 0
+ self.subtaskIDs = {}
+ self.aggregatorIDs = {}
+ self.formatters = {}
+
+
+ def storeBasicUnit(self, u_reference, u_symbol):
+ uref = self.ontology._tail(u_reference)
+ bu = self.um.newBasicUnit(uref, u_symbol)
+ for p_reference in self.unitmodel.inferPossiblePrefixesOf(u_reference):
+ p = self.pm[ self.ontology._tail(p_reference) ]
+ puref = "%s_%s" % (p.reference, uref)
+ symbol = "%s%s" % (p.symbol, bu.symbol)
+ self.um.addLinearTransformedUnit(puref, symbol, bu, p.scale)
+
+ def storeLinearTransformedUnit(self, u_reference, u_symbol, derivedfrom, scale, offset):
+ uref = self.ontology._tail(u_reference)
+ ancestor = self.um[ self.ontology._tail(derivedfrom) ]
+ bu = self.um.addLinearTransformedUnit(uref, u_symbol, ancestor, scale, offset)
+ for p_reference in self.unitmodel.inferPossiblePrefixesOf(u_reference):
+ p = self.pm[ self.ontology._tail(p_reference) ]
+ puref = "%s_%s" % (p.reference, uref)
+ symbol = "%s%s" % (p.symbol, bu.symbol)
+ self.um.addLinearTransformedUnit(puref, symbol, bu, p.scale)
+
+ def newProcessID(self):
+ try:
+ return "%s:process:%d" % (self.platform, self._nextID)
+ finally:
+ self._nextID += 1
+
+ def newAggregateID(self, isprocess = True):
+ try:
+ return "%s:aggregate:%d" % (self.platform, self._nextID)
+ finally:
+ self._nextID += 1
+
+ def measure(self, credential, query):
+ #TODO: docs
+ '''
+ '''
+ g = Graph()
+ g += self.ontology.graph
+ sio = StringIO(query)
+ g.parse(source = sio)
+ responses = []
+ errors = []
+ queries = self.QI.inferBundleQueries(qgraph = g)
+ self.log(shortmsg = "measurements starting...", message = "Attempt to launch %d measurement threads" % len(queries))
+ for q in queries:
+ feature_uri = q.feature
+ domain = self.ontology.ns('task')['Substrate']
+ taskgen = self.taskmodel.inferTasks(domain, feature_uri)
+ no_tool = True
+ (resource_uri, resource) = q.resource
+ #we are ugly here: use the first tool
+ for task_uri, _ in taskgen:
+ no_tool = False
+ _, task = self.newTask(task = task_uri, cred = credential, resource = resource, parameters = q.paramlist)
+ if q.samplechain:
+ task.strategy = STRAT_PERIODICAL
+ # we apply some aggregation to the data
+ flow = []
+ for skeleton, parlist in q.samplechain:
+ flow.append((skeleton, parlist.formkeyvaldict()))
+ aid = self.am.newAggregator(task.data, CellRequestByFeature(feature = q.feature), flow)
+ A = self.am[aid]
+ task.enable()
+ while True:
+ try:
+ task.dataAdded.wait( 15 )
+ formatter = JsonFormatter(datasource = A.data)
+ break
+ except SamplerError:
+ task.dataAdded.clear()
+ sleep(1)
+ else:
+ task.strategy = STRAT_ONDEMAND
+ task.enable()
+ task.dataAdded.wait( 15 )
+ formatter = JsonFormatter(datasource = task.data)
+ formatter.extract(cellrequest = [
+ CellRequestByName(name = "Run"),
+ CellRequestByFeature(feature = feature_uri)
+ ])
+ responses.append( formatter.serialize() )
+ if no_tool:
+ err_description = "No tools to measure %s @ %s" % (feature_uri, resource_uri)
+ errors.append(err_description)
+ self.log(shortmsg = "Limited result set", message = err_description)
+ useful_data = ",\n".join( responses )
+ error_data = "+".join(errors)
+ if len(errors):
+ if len(useful_data):
+ response = "{%s,\n\"errors\" : \"%s\"}" % (useful_data, error_data)
+ else:
+ response = "{\"errors\" : \"%s\"}" % (error_data)
+ else:
+ response = "{%s}" % useful_data
+ return response
+
+ def launchTasks(self, credential, query):
+ #TODO: many things in common with measure!!!
+ g = Graph()
+ g += self.ontology.graph
+ sio = StringIO(query)
+ g.parse(source = sio)
+ taskID = self.newID()
+ idstore = self.subtaskIDs[taskID] = []
+ formatters = self.formatters[taskID] = []
+ for q in self.QI.getBundleQuery(qgraph = g):
+ feature_uri = q.feature
+
+ print "PPPPP", q.paramlist
+
+ domain = self.ontology.ns('task')['Slice']
+ taskgen = self.taskmodel.inferTasks(domain, feature_uri)
+ #we are ugly here: use the first tool
+ for task_uri, _ in taskgen:
+ subtaskID, task = self.newTask(task = task_uri, cred = credential, resource = q.resource, parameters = q.paramlist)
+ task.strategy = STRAT_PERIODICAL
+ task.enable()
+ idstore.append(subtaskID)
+ f = q.formatter(datasource = task.data)
+ formatters.append(f)
+ if len(idstore):
+ return taskID
+ else:
+ self.subtaskIDs.pop(taskID)
+ self.formatters.pop(taskID)
+ return None
+
+
+
+ def newTask(self, task, cred, resource = None, parameters = ParameterList()):
+ '''
+ @summary: initialize a Task object, which is referenced by a uri
+ @param task: the reference to the task description
+ @type task: URIRef
+ @param cred: an iterable over dictionaries, which are used as input parameters to initialize Credential templates passed to the Task object for authentication, authorization purposes
+ @type cred: dict generator
+ @param resource: the resource to measure
+ @type resource: resource or None
+ @param parameters: the parameter list to refresh the default parameters of the Task object
+ @type parameters: ParameterList
+ @return: the tuple of taskID and the initialized measurement Task object
+ @rtype: int, Task
+ '''
+ name = self.ontology._tail(task)
+ credset = self.taskmodel.inferCredentialOf(task)
+ driver = self.taskmodel.inferDriverOf(task)
+ hdr = self.taskmodel.inferDataheaderOf(task)
+ hooks = self.taskmodel.inferHookdefinitionsOf(task)
+ hookpar = self.taskmodel.inferHookparametersOf(task)
+ taskparameters = self.taskmodel.inferParametersOf(task)
+
+ taskparameters.update_by_list(parameters)
+
+ #TODO: maybe better push resource to the Task as an argument
+ if isinstance(resource, node):
+ addr, unit = resource.get_ipaddress("eth0")
+ taskparameters.update("SourceAddress", addr, unit)
+# print taskparameters
+
+ while len(credset):
+ ct = credset.pop()
+ for c in cred:
+ try:
+ credential = ct(**c)
+ except:
+ # credential mismatch go on with the next
+ continue
+ try:
+ return self.stm.generate(name = name, driver = driver, dataheader = hdr,
+ hookimplementations = hooks, parameters = taskparameters, credential = credential, **hookpar)
+ except BadAuthenticationType:
+ pass
+ raise TaskError("Cannot initialize the Task with the credential set provided for %s" % name)
+
+ def delTask(self, taskidentifier):
+ self.stm.pop( taskidentifier )
+
+ def getTask(self, taskidentifier):
+ return self.stm[ taskidentifier ]
+
+ def attachAggregators(self, credential, query):
+ g = Graph()
+ g += self.ontology.graph
+ sio = StringIO(query)
+ g.parse(source = sio)
+ aggregatorID = self.newID()
+ idstore = self.aggregatorIDs[aggregatorID] = []
+ formatters = self.formatters[aggregatorID] = []
+ raise Exception("unimplemented")
+# for q in self.QI.getBundleQuery(qgraph = g):
+# feature_uri = q.feature
+#
+# print "PPPPP", q.paramlist
+#
+# domain = self.ontology.ns('task')['Slice']
+# taskgen = self.taskmodel.inferTasks(domain, feature_uri)
+# #we are ugly here: use the first tool
+# for task_uri, _ in taskgen:
+# subtaskID, task = self.newTask(task = task_uri, cred = credential, resource = q.resource, parameters = q.paramlist)
+# task.strategy = STRAT_PERIODICAL
+# task.enable()
+# idstore.append(subtaskID)
+# f = q.formatter(datasource = task.data)
+# formatters.append(f)
+ if len(idstore):
+ return aggregatorID
+ else:
+ self.subtaskIDs.pop(aggregatorID)
+ self.formatters.pop(aggregatorID)
+ return None
+
+ def newAggregator(self):
+ pass
+
+ def delAggregator(self, aggregatoridentifier):
+ self.am.pop( aggregatoridentifier )
+
+ def getAggregator(self, aggregatoridentifier):
+ return self.am[ aggregatoridentifier ]
+
diff --git a/Monitoring/src/main/python/Service/__init__$py.class b/Monitoring/src/main/python/Service/__init__$py.class
new file mode 100644
index 0000000..7dcd0ad
--- /dev/null
+++ b/Monitoring/src/main/python/Service/__init__$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Service/__init__.py b/Monitoring/src/main/python/Service/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Service/__init__.py
diff --git a/Monitoring/src/main/python/Service/__init__.py.old b/Monitoring/src/main/python/Service/__init__.py.old
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Monitoring/src/main/python/Service/__init__.py.old
diff --git a/Monitoring/src/main/python/Service/a b/Monitoring/src/main/python/Service/a
new file mode 100644
index 0000000..7af859c
--- /dev/null
+++ b/Monitoring/src/main/python/Service/a
@@ -0,0 +1,38 @@
+>
+ <core:hasInboundInterface rdf:resource="http://foo.bar/req.owl#ifin"/>
+ <core:hasIPv4Address rdf:resource="http://foo.bar/req.owl#smilax_address"/>
+ <core:hasIPv4Address rdf:resource="http://foo.bar/req.owl#smilax_address"/>
+ <core:hasOutboundInterface rdf:resource="http://foo.bar/req.owl#ifin"/>
+ <feature:hasFeature rdf:resource="http://fp7-novi.eu/monitoring_features.owl#MemoryUtilization"/>
+ <query:hasFormatter rdf:resource="http://fp7-novi.eu/monitoring_query.owl#Formatter_JSON"/>
+ <query:hasResource rdf:resource="http://foo.bar/req.owl#smilax1"/>
+ </rdf:Description>
+ </rdf:Description>
+ </rdf:Description>
+ </rdf:Description>
+ </rdf:Description>
+ <rdf:Description rdf:about="http://foo.bar/req.owl#ifin">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#ifout">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#measureMemoryInformation">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#smilax1">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#smilax_address">
+</rdf:RDF>
+<rdf:RDF
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Interface"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Interface"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Node"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Resource"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/monitoring_query.owl#BundleQuery"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/unit.owl#IPAddress"/>
+ <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#NamedIndividual"/>
+ <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#NamedIndividual"/>
+ <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#NamedIndividual"/>
+ <stat:hasSample rdf:resource="http://fp7-novi.eu/monitoring_stat.owl#UnmodifiedExtractOfFeatureSamples"/>
+ <unit:hasValue>150.254.160.19</unit:hasValue>
+ xmlns:core="http://fp7-novi.eu/im.owl#"
+ xmlns:feature="http://fp7-novi.eu/monitoring_features.owl#"
+ xmlns:query="http://fp7-novi.eu/monitoring_query.owl#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:stat="http://fp7-novi.eu/monitoring_stat.owl#"
+ xmlns:unit="http://fp7-novi.eu/unit.owl#"
+<?xml version="1.0" encoding="UTF-8"?>
diff --git a/Monitoring/src/main/python/Service/b b/Monitoring/src/main/python/Service/b
new file mode 100644
index 0000000..59b1a36
--- /dev/null
+++ b/Monitoring/src/main/python/Service/b
@@ -0,0 +1,43 @@
+>
+ <core:hasInboundInterface rdf:resource="http://foo.bar/req.owl#ifin"/>
+ <core:hasIPv4Address rdf:resource="http://foo.bar/req.owl#ifin_address"/>
+ <core:hasIPv4Address rdf:resource="http://foo.bar/req.owl#ifout_address"/>
+ <core:hasOutboundInterface rdf:resource="http://foo.bar/req.owl#ifout"/>
+ <feature:hasFeature rdf:resource="http://fp7-novi.eu/monitoring_features.owl#MemoryUtilization"/>
+ <query:hasFormatter rdf:resource="http://fp7-novi.eu/monitoring_query.owl#Formatter_JSON"/>
+ <query:hasResource rdf:resource="http://foo.bar/req.owl#smilax1"/>
+ </rdf:Description>
+ </rdf:Description>
+ </rdf:Description>
+ </rdf:Description>
+ </rdf:Description>
+ </rdf:Description>
+ <rdf:Description rdf:about="http://foo.bar/req.owl#ifin">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#ifin_address">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#ifout">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#ifout_address">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#measureMemoryInfo">
+ <rdf:Description rdf:about="http://foo.bar/req.owl#smilax1">
+</rdf:RDF>
+<rdf:RDF
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Interface"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Interface"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Node"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/im.owl#Resource"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/monitoring_query.owl#BundleQuery"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/unit.owl#IPAddress"/>
+ <rdf:type rdf:resource="http://fp7-novi.eu/unit.owl#IPAddress"/>
+ <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#NamedIndividual"/>
+ <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#NamedIndividual"/>
+ <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#NamedIndividual"/>
+ <rdf:type rdf:resource="http://www.w3.org/2002/07/owl#NamedIndividual"/>
+ <stat:hasSample rdf:resource="http://fp7-novi.eu/monitoring_stat.owl#UnmodifiedExtractOfFeatureSamples"/>
+ <unit:hasValue>150.254.160.19</unit:hasValue>
+ <unit:hasValue>150.254.160.19</unit:hasValue>
+ xmlns:core="http://fp7-novi.eu/im.owl#"
+ xmlns:feature="http://fp7-novi.eu/monitoring_features.owl#"
+ xmlns:query="http://fp7-novi.eu/monitoring_query.owl#"
+ xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
+ xmlns:stat="http://fp7-novi.eu/monitoring_stat.owl#"
+ xmlns:unit="http://fp7-novi.eu/unit.owl#"
+<?xml version="1.0" encoding="UTF-8"?>
diff --git a/Monitoring/src/main/python/Service/interface$py.class b/Monitoring/src/main/python/Service/interface$py.class
new file mode 100644
index 0000000..2524e39
--- /dev/null
+++ b/Monitoring/src/main/python/Service/interface$py.class
Binary files differ
diff --git a/Monitoring/src/main/python/Service/interface.py b/Monitoring/src/main/python/Service/interface.py
new file mode 100644
index 0000000..104815f
--- /dev/null
+++ b/Monitoring/src/main/python/Service/interface.py
@@ -0,0 +1,316 @@
+'''
+Created on 08.08.2011
+
+@author: steger, jozsef
+'''
+
+from rdflib import Graph
+from StringIO import StringIO
+from Service.MonitoringService import MonitoringService
+import logging
+
class InterfaceError(Exception):
    '''
    @summary: Raised when the monitoring service interface is misused,
    e.g. a malformed task/aggregate identifier or a call to an
    unimplemented operation.
    '''
+
#TODO: add and handle bindings at this level
class MSInterface(object):
    '''
    @summary: Implements a thin service layer on top of the MonitoringService instance
    to collect methods that need to be exported and mapped in the NOVI API.
    It also provides a reference to the framework to be able to communicate with
    remote MonitoringService instances. The log() method is a place holder
    to sink information to be pushed in the NOVI UserFeedback service.
    The emit() method is a place holder to sink signals to be pushed in the NOVI
    Policy Service component installed on top of the same platform.
    '''

    def __init__(self, framework, reference, baseurl, config_owl):
        '''
        Constructor
        @param framework: a service which provides getService() method to look up MonSrv instances of different reference
        @type framework: Framework
        @param reference: the name of the platform
        @type reference: str
        @param baseurl: the location of the ontology files. Either point to the file system or to a public url
        @type baseurl: str
        @param config_owl: platform specific configuration model
        @type config_owl: str
        '''
        self.framework = framework
        self.platform = reference
        self._ms = MonitoringService(self, baseurl, config_owl)
        self.logger = logging.getLogger(name = "NOVI.MSI.%s" % reference)

    def _get_service(self):
        '''
        @return: the underlying monitoring service component
        @rtype: MonitoringService
        '''
        return self._ms

    def _get_proxy(self):
        '''
        @return: a proxy service to look up the rest of the monitoring service components
        @rtype: Framework
        '''
        # Fix: the constructor stores the framework as self.framework (no
        # leading underscore); the original self._framework lookup always
        # raised AttributeError.
        return self.framework

    def dispatchID(self, identifier):
        '''
        @summary: this method finds the MonitoringService instance that is responsible for handling an identified Task or Aggregate
        @param identifier: identifier of a task or aggregate, it follows the form: <platform>:<process|aggregate>:<id>
        @type identifier: string
        @return: the monitoring service instance
        @rtype: MonitoringService
        @raise InterfaceError: if the identifier does not split into exactly three colon separated parts
        '''
        try:
            platform, _, _ = identifier.split(':')
            if self.platform == platform:
                return self.service
            # identifier belongs to a remote platform: delegate the look-up
            return self.framework.getService(platform)
        except ValueError:
            raise InterfaceError("Wrong identifier format")

    def log(self, shortmsg, message):
        # overridden by the JAVA wrapper
        self.logger.info("[%s] %s" % (shortmsg, message))

    def emit(self, what):
        # overridden by the JAVA wrapper
        self.framework.getPolicyService(self.platform).trigger(what)

    # Test purpose function
    def echo(self, platform):
        '''
        @summary: An integration tester function (to be exported public)
        @param platform: name of the platform
        @type platform: string
        @return: messages of the platforms taking part in the message flow
        @rtype: string
        '''
        self.logger.info("[echo] calling %s" % platform)
        try:
            otherservice = self.framework.getService(platform).getPlatform()
            return "%s -> %s" % (str(self.platform), str(otherservice)), ""
        except Exception:
            # narrowed from a bare except: still best-effort, but no longer
            # swallows SystemExit/KeyboardInterrupt
            return "Exception: %s" % str(self.platform), ""


    # Substrate monitoring function
    def measure(self, credential, query):
        '''
        @summary: Method to handle substrate monitoring queries (to be exported public)
        @param credential:
        @type credential:
        @param query: an owl document containing several BundleQuery instances
        @type query: string
        @return: response to the query
        @rtype: string
        '''
        #TODO: split query and concatenate results
        return self.service.measure(credential, query)

    # Slice monitoring functions
    def sliceTasks(self, credential, query):
        raise InterfaceError("sliceTasks() method is not implemented")

    def addTask(self, credential, query):
        '''
        @summary: Method to start slice monitoring tasks (to be exported public)
        @param credential:
        @type credential:
        @param query: an owl document containing several BundleQuery instances
        @type query: string
        @return: process identifier
        @rtype: string
        '''
        #TODO: investigate if the service instance under this interface should be the boss
        return self.service.launchTasks(credential, query)

    def describeTaskData(self, credential, query):
        '''
        @summary: Method to retrieve meta data of task data (to be exported public)
        @param credential:
        @type credential:
        @param query: the task identifier
        @type query: string
        @return: serialize the header of the data tables
        @rtype: string
        '''
        taskID = query
        ms = self.dispatchID(identifier = taskID)
        #TODO: move this in the MonitoringService
        headers = map(lambda x: x.header(), ms.formatters[taskID])
        return "[%s]" % "\n,\n".join(headers)

    def fetchTaskData(self, credential, query):
        '''
        @summary: Method to retrieve task data collected since last fetch or the start (to be exported public)
        @param credential:
        @type credential:
        @param query: the task identifier
        @type query: string
        @return: serialize the appended content of the data tables
        @rtype: string
        '''
        taskID = query
        ms = self.dispatchID(identifier = taskID)
        #TODO: move this in the MonitoringService
        response = []
        try:
            for f in ms.formatters[taskID]:
                response.append( f.serialize() )
        except Exception as e:
            # best effort: return whatever was serialized before the failure;
            # the former debug print is routed to the logger instead of stdout
            self.logger.error("failed to serialize task data: %s" % e)
        return "[%s]" % "\n,\n".join(response)

    def modifyTask(self, credential, query):
        raise InterfaceError("modifyTask() method is not implemented")

    def removeTask(self, credential, query):
        '''
        @summary: Method to remove a slice measurement task (to be exported public)
        @param credential:
        @type credential:
        @param query: the task identifier
        @type query: string
        '''
        taskID = query
        ms = self.dispatchID(identifier = taskID)
        #TODO: move this in the MonitoringService
        try:
            subtaskids = ms.subtaskIDs.pop(taskID)
            ms.formatters.pop(taskID)
            while len(subtaskids):
                subtaskid = subtaskids.pop()
                ms.delTask(taskidentifier = subtaskid)
        except KeyError:
            # the taskID does not belong to me
            pass

    def enableTask(self, credential, query):
        '''
        @summary: Method to enable a slice measurement task (to be exported public)
        @param credential:
        @type credential:
        @param query: the task identifier
        @type query: string
        '''
        taskID = query
        ms = self.dispatchID(identifier = taskID)
        try:
            for subtaskid in ms.subtaskIDs[taskID]:
                t = ms.getTask(taskidentifier = subtaskid)
                t.enable()
        except KeyError:
            # the taskID does not belong to me
            pass

    def disableTask(self, credential, query):
        '''
        @summary: Method to disable a slice measurement task temporarily (to be exported public)
        @param credential:
        @type credential:
        @param query: the task identifier
        @type query: string
        '''
        taskID = query
        ms = self.dispatchID(identifier = taskID)
        try:
            for subtaskid in ms.subtaskIDs[taskID]:
                t = ms.getTask(taskidentifier = subtaskid)
                t.disable()
        except KeyError:
            # the taskID does not belong to me
            pass

    def getTaskStatus(self, credential, query):
        '''
        @summary: Method to check the state of a slice measurement task (to be exported public)
        @param credential:
        @type credential:
        @param query: the task identifier
        @type query: string
        @return: True if any of the subtasks is running
        @rtype: boolean
        '''
        taskID = query
        ms = self.dispatchID(identifier = taskID)
        try:
            for subtaskid in ms.subtaskIDs[taskID]:
                t = ms.getTask(taskidentifier = subtaskid)
                if t.state == t.STATE_RUNNING:
                    return True
        except KeyError:
            # the taskID does not belong to me
            pass
        return False

    def addAggregator(self, credential, query):
        '''
        @summary: Method to define new data manipulation on slice monitoring data (to be exported public)
        @param credential:
        @type credential:
        @param query: an owl document containing several SampleManipulationQuery instances
        @type query: string
        @return: aggregator identifier
        @rtype: string
        '''
        #TODO: investigate if the service instance under this interface should be the boss
        return self.service.attachAggregators(credential, query)

    def removeAggregator(self, credential, query):
        '''
        @summary: Method to remove data manipulation on slice monitoring data (to be exported public)
        @param credential:
        @type credential:
        @param query: the aggregator identifier
        @type query: string
        '''
        aggregatorID = query
        ms = self.dispatchID(identifier = aggregatorID)
        try:
            aggregatorids = ms.aggregatorIDs.pop(aggregatorID)
            ms.formatters.pop(aggregatorID)
            while len(aggregatorids):
                aggregatorid = aggregatorids.pop()
                ms.delAggregator(aggregatorid)
        except KeyError:
            # the aggregatorID does not belong to me
            pass

    def fetchAggregatorData(self, credential, query):
        '''
        @summary: Method to refresh and serialize results of data manipulation on slice monitoring data (to be exported public)
        @param credential:
        @type credential:
        @param query: the aggregator identifier
        @type query: string
        @return: result of aggregators
        @rtype: string
        '''
        aggregatorID = query
        ms = self.dispatchID(identifier = aggregatorID)
        response = []
        try:
            for f in ms.formatters[aggregatorID]:
                response.append( f.serialize() )
        except Exception as e:
            # best effort: return whatever was serialized before the failure;
            # the former debug print is routed to the logger instead of stdout
            self.logger.error("failed to serialize aggregator data: %s" % e)
        return "[%s]" % "\n,\n".join(response)

    def addCondition(self, credential, query):
        raise InterfaceError("addCondition() method is not implemented")

    def modifyCondition(self, credential, query):
        raise InterfaceError("modifyCondition() method is not implemented")

    def removeCondition(self, credential, query):
        raise InterfaceError("removeCondition() method is not implemented")


    proxy = property(_get_proxy,None,None)

    service = property(_get_service,None,None)
diff --git a/Monitoring/src/main/python/Service/interface.py.old b/Monitoring/src/main/python/Service/interface.py.old
new file mode 100644
index 0000000..c914bc1
--- /dev/null
+++ b/Monitoring/src/main/python/Service/interface.py.old
@@ -0,0 +1,308 @@
+'''
+Created on 08.08.2011
+
+@author: steger, jozsef
+'''
+from Service.MonitoringService import MonitoringService
+import logging
+
class InterfaceError(Exception):
    '''
    @summary: Signals improper use of the monitoring interface, such as
    a badly formatted identifier or an operation that is not implemented.
    '''
+
+#TODO: add and handle bindings at this level
+class MSInterface(object):
+ '''
+ @summary: Implements a thin service layer on top of the MonitoringService instance
+ to collect methods that need to be exported and mapped in the NOVI API.
+ It also provides a reference to the framework to be able to communicate with
+ remote MonitoringService instances. The log() method is a place holder
+ to sink information to be pushed in the NOVI UserFeedback service.
+ The emit() method is a place holder to sink signals to be pushed in the NOVI
+ Policy Service component installed on top of the same platform.
+ '''
+
+ def __init__(self, framework, reference, baseurl, config_owl):
+ '''
+ Constructor
+ @param framework: a service which provides getService() method to look up MonSrv instances of different reference
+ @type framework: Framework
+ @param reference: the name of the platform
+ @type reference: str
+ @param baseurl: the location of the ontology files. Either point to the file system or to a public url
+ @type baseurl: str
+ @param config_owl: platform specific configuration model
+ @type config_owl: str
+ '''
+ self.framework = framework
+ self.platform = reference
+ self._ms = MonitoringService(self, baseurl, config_owl)
+ self.logger = logging.getLogger(name = "NOVI.MSI.%s" % reference)
+
+ @property
+ def service(self):
+ '''
+ @return: the underlying monitoring service component
+ @rtype: MonitoringService
+ '''
+ return self._ms
+
+ @property
+ def proxy(self):
+ '''
+ @return: a proxy service to look up the rest of the monitoring service components
+ @rtype: Framework
+ '''
+ return self._framework
+
+ def dispatchID(self, identifier):
+ '''
+ @summary: this method finds the MonitoringService instance that is responsible for handling an identified Task or Aggregate
+ @param identifier: identifier of a task or aggregate, it follows the form: <platform>:<process|aggregate>:<id>
+ @type identifier: string
+ @return: the monitoring service instance
+ @rtype: MonitoringService
+ '''
+ try:
+ platform, _, _ = identifier.split(':')
+ if self.platform == platform:
+ return self.service
+ return self.framework.getService(platform)
+ except ValueError:
<