/**
 * Starts timing by delegating to the wrapped JAMon monitor.
 *
 * @see com.jamonapi.Monitor#start()
 */
@Override
public void start() {
    monitor.start();
}
/**
 * Starts timing; simply forwards to the underlying JAMon monitor.
 *
 * @see com.jamonapi.Monitor#start()
 */
public void start() {
    monitor.start();
}
/**
 * Starts every monitor in this composite.
 *
 * @return this composite, for call chaining
 */
public Monitor start() {
    for (int row = 0; row < numRows; row++) {
        monitors[row].start();
    }
    return this;
}
/**
 * Starts all row monitors held by this composite.
 *
 * @return this composite, allowing chained calls
 */
@Override
public Monitor start() {
    for (int row = 0; row < numRows; row++) {
        monitors[row].start();
    }
    return this;
}
/**
 * Parses all initial trees concurrently on a fixed-size thread pool
 * (one thread per available processor), timing the run with the "parse"
 * JAMon monitor.
 *
 * @param initTrees the (tree, tree-id) pairs to process
 * @param n         parameter forwarded unchanged to each {@link TreeProcessor}
 */
private void internalParseMultiThreaded(List<Pair<TreeNode, Short>> initTrees, int n) {
    Monitor parseMon = MonitorFactory.getTimeMonitor("parse");
    ExecutorService threadPool =
            Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());
    parseMon.start();
    try {
        for (Pair<TreeNode, Short> pair : initTrees) {
            TreeNode tree = pair.getFirst();
            short tid = pair.getSecond();
            threadPool.execute(new TreeProcessor(tree, tid, n));
        }
        threadPool.shutdown();
        // Block until all tasks finish. The original spun in an empty
        // while(!isTerminated()){} loop, burning a full CPU core.
        threadPool.awaitTermination(Long.MAX_VALUE, java.util.concurrent.TimeUnit.NANOSECONDS);
    } catch (InterruptedException e) {
        // Preserve the interrupt status for callers further up the stack.
        Thread.currentThread().interrupt();
    } finally {
        // BUG FIX: the original called parseMon.start() a second time here,
        // so the "parse" timer was never stopped; stop() is what was intended.
        parseMon.stop();
    }
}
/**
 * Builds a SPARQL CONSTRUCT query returning all triples of the given
 * individuals except rdf:type statements, optionally narrowed by an
 * additional ABox filter expression. Query generation is timed via JAMon.
 *
 * @param individuals the subject resources to include
 * @param aboxfilter  extra FILTER clause text, or {@code null} for none
 * @return the generated SPARQL query string
 */
public String createQuery(Set<String> individuals, String aboxfilter) {
    Monitor mon = MonitorFactory.getTimeMonitor("ABox query generator").start();
    StringBuilder sb = new StringBuilder();
    sb.append("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>\n")
      .append("CONSTRUCT {?s ?p ?o } ")
      .append("{ ?s ?p ?o . ")
      .append(makeInFilter("?s", individuals));
    if (aboxfilter != null) {
        sb.append(aboxfilter);
    }
    sb.append("FILTER ( (?p!=rdf:type))").append("}");
    mon.stop();
    return sb.toString();
}
/**
 * Builds a SPARQL CONSTRUCT query retrieving the asserted classes of the
 * given individuals, optionally narrowed by an extra filter expression.
 * Query generation is timed via JAMon.
 *
 * @param individuals the example resources (angle brackets are added if missing)
 * @param filter      extra FILTER clause text, or {@code null} for none
 * @return the generated SPARQL query string
 */
public String createQuery(Set<String> individuals, String filter) {
    Monitor mon = MonitorFactory.getTimeMonitor("TBox query generator").start();
    StringBuilder sb = new StringBuilder("CONSTRUCT { ?example a ?class . } ");
    sb.append("{ ?example a ?class . ");
    sb.append("Filter ( ?example IN(");
    for (String individual : individuals) {
        // Wrap in angle brackets only when the IRI is not already bracketed.
        String wrapped = (individual.startsWith("<") ? "" : "<")
                + individual
                + (individual.endsWith(">") ? "" : ">");
        sb.append(wrapped).append(", ");
    }
    // Remove the comma of the trailing ", " separator; the final space stays.
    sb.deleteCharAt(sb.length() - 2);
    sb.append(")) . \n");
    if (filter != null) {
        sb.append(filter);
    }
    sb.append("}");
    mon.stop();
    return sb.toString();
}
private String getHash(String string) { Monitor hashTime = JamonMonitorLogger.getTimeMonitor(Cache.class, "HashTime").start(); // calculate md5 hash of the string (code is somewhat // difficult to read, but there doesn't seem to be a // single function call in Java for md5 hashing) MessageDigest md5 = null; try { md5 = MessageDigest.getInstance("MD5"); } catch (NoSuchAlgorithmException e) { e.printStackTrace(); } md5.reset(); md5.update(string.getBytes()); byte[] result = md5.digest(); StringBuffer hexString = new StringBuffer(); for (byte aResult : result) { hexString.append(Integer.toHexString(0xFF & aResult)); } String str = hexString.toString(); hashTime.stop(); return str; }
public String getQuestion(QueryTree<N> lgg, List<QueryTree<N>> negTrees, List<String> knownResources) throws TimeOutException{ // return computeQuestionOptimized(lgg, negTrees, knownResources); mon.start(); String question = computeQuestionBetterPerformance(lgg, negTrees, knownResources); mon.stop(); return question; }
/**
 * Starts the monitor stored at the given row index for the supplied key.
 *
 * @param monkey the monitor key to look up
 * @param index  the row index within that key's monitor array
 * @return the started monitor
 */
public Monitor start(MonKeyImp monkey, int index) {
    Monitor[] rows = get(monkey).getMonitors();
    return rows[index].start();
}
/** * this checks for consistency and manipulates the tuples, before they get * triple */ public SortedSet<RDFNodeTuple> manipulate( Node node, SortedSet<RDFNodeTuple> tuples) { Monitor m = JamonMonitorLogger.getTimeMonitor(Manipulator.class, "Time for Rules").start(); //logger.warn("before: "+tuples.size()); for (Rule rule : rules) { tuples = rule.applyRule(node, tuples); } //logger.warn("after: "+tuples.size()); m.stop(); return tuples; }
/** * Start the monitor. */ public void start() { if(this.isActive) { // when reseting using the JAMon GUI the custom ranges are discarded MonitorFactory.setRangeDefault(MONITOR_LABEL, Jamon2PerformanceMonitorImpl.rangeHolder); // do the internal house-keeping this.startTime = System.currentTimeMillis(); MethodToStringBuilderImpl methodToStringBuilder = new MethodToStringBuilderImpl(this.method, 0); String methodSignature = methodToStringBuilder.toString(); this.monitor = MonitorFactory.getMonitor(methodSignature, MONITOR_LABEL); this.monitor.start(); } }
/** * Start the monitor. */ public void start() { if(this.isActive) { // when reseting using the JAMon GUI the custom ranges are discarded MonitorFactory.setRangeDefault(MONITOR_LABEL, Jamon2PerformanceMonitorImpl.rangeHolder); // do the internal house-keeping this.startTime = System.currentTimeMillis(); MethodToStringBuilderImpl methodToStringBuilder = new MethodToStringBuilderImpl(this.method, 0); String methodSignature = methodToStringBuilder.toString(); this.monitor = MonitorFactory.getMonitor(methodSignature, MONITOR_LABEL); this.monitor.start(); } }
/** * Note. I am no longer sure that the following statements still hold true for jetty. 2/15/15 * * Jetty Handlers does not let jamon start/stop time them. It seems the request is done by the time jamon gets it. * To overcome this use the jetty api to get the time of a request for a page. If it isn't a jetty request then call * the parent. Note although start is called because the timing for jetty is done after the request is finished * 'active' statistics will not be accurate. */ @Override // Only called if this is a time monitor i.e units are 'ms.' Monitor startTimeMon(HttpMonRequest httpMonBase) { if (httpMonBase.getRequest() instanceof Request) return MonitorFactory.getMonitor(getMonKey(httpMonBase)).start(); else return super.startTimeMon(httpMonBase); }
/**
 * Converts the given nodes into the collector's current OWL ontology and
 * optionally saves it afterwards; both phases are timed via JAMon.
 *
 * @param nodes        the nodes to convert
 * @param saveOntology whether to persist the ontology after conversion
 * @return the current ontology held by the collector
 */
public OWLOntology getOWLAPIOntologyForNodes(List<Node> nodes, boolean saveOntology) {
    Monitor conversionMon = JamonMonitorLogger.getTimeMonitor(Manager.class, "Time conversion to OWL Ontology").start();
    for (Node node : nodes) {
        node.toOWLOntology(configuration.getOwlAPIOntologyCollector());
    }
    conversionMon.stop();

    if (saveOntology) {
        Monitor saveMon = JamonMonitorLogger.getTimeMonitor(Manager.class, "Time saving Ontology").start();
        configuration.getOwlAPIOntologyCollector().saveOntology();
        saveMon.stop();
    }
    return configuration.getOwlAPIOntologyCollector().getCurrentOntology();
}
@Override public RDFResourceTree getLGG(RDFResourceTree tree1, RDFResourceTree tree2, boolean learnFilters) { startTime = System.currentTimeMillis(); reset(); // apply some pre-processing tree1 = preProcess(tree1); tree2 = preProcess(tree2); // compute the LGG mon.start(); RDFResourceTree lgg = computeLGG(tree1, tree2, learnFilters); mon.stop(); // apply some post-processing lgg = postProcess(lgg); addNumbering(0, lgg); return lgg; }
private TreeSet<OWLClassExpression> refineNode(OENode node) { logger.trace(sparql_debug,"REFINE NODE " + node); MonitorFactory.getTimeMonitor("refineNode").start(); // we have to remove and add the node since its heuristic evaluation changes through the expansion // (you *must not* include any criteria in the heuristic which are modified outside of this method, // otherwise you may see rarely occurring but critical false ordering in the nodes set) searchTree.updatePrepare(node); int horizExp = node.getHorizontalExpansion(); TreeSet<OWLClassExpression> refinements = (TreeSet<OWLClassExpression>) operator.refine(node.getDescription(), horizExp+1); // System.out.println("refinements: " + refinements); node.incHorizontalExpansion(); node.setRefinementCount(refinements.size()); // System.out.println("refined node: " + node); searchTree.updateDone(node); MonitorFactory.getTimeMonitor("refineNode").stop(); return refinements; }
@Override public void put(String instanceKey) { Monitor mon = MonitorFactory.getTimeMonitor(getJamonLabel(".put()")); // only allow 1 process to put at the sametime. if (mon.getActive() < 1) { mon.start(); try { if (instanceKey!=null) { jamonDataPersister.put(instanceKey); } } catch(Throwable t) { MonitorFactory.addException(mon, t); } finally { mon.stop(); } } }
private TreeSet<OWLClassExpression> refineNode(LengthLimitedRefinementOperator operator, OENode node) { MonitorFactory.getTimeMonitor("refineNode").start(); // we have to remove and add the node since its heuristic evaluation changes through the expansion // (you *must not* include any criteria in the heuristic which are modified outside of this method, // otherwise you may see rarely occurring but critical false ordering in the nodes set) searchTree.updatePrepare(node); int horizExp = node.getHorizontalExpansion(); TreeSet<OWLClassExpression> refinements = (TreeSet<OWLClassExpression>) operator.refine(node.getDescription(), horizExp+1); // System.out.println("refinements: " + refinements); node.incHorizontalExpansion(); node.setRefinementCount(refinements.size()); // System.out.println("refined node: " + node); searchTree.updateDone(node); MonitorFactory.getTimeMonitor("refineNode").stop(); return refinements; }