@Override public boolean hasPermission(@Nonnull Authentication a, Permission permission) { if(a==SYSTEM) { if(LOGGER.isLoggable(FINE)) LOGGER.fine("hasPermission("+a+","+permission+")=>SYSTEM user has full access"); return true; } Boolean b = _hasPermission(a,permission); if(LOGGER.isLoggable(FINE)) LOGGER.fine("hasPermission("+a+","+permission+")=>"+(b==null?"null, thus false":b)); if(b==null) b=false; // default to rejection return b; }
/**
 * Moves every pre-existing user directory into its mapped location and
 * persists the resulting mapping. Intended to run once, when legacy
 * {@code users/<id>} directories are converted to the userId-mapping scheme.
 *
 * @param mapper the mapping to populate and persist
 * @throws IOException if a directory cannot be moved or the mapping cannot be saved
 */
void migrateUsers(UserIdMapper mapper) throws IOException {
    LOGGER.fine("Beginning migration of users to userId mapping.");
    for (Map.Entry<String, File> entry : scanExistingUsers().entrySet()) {
        String userId = entry.getKey();
        File oldDirectory = entry.getValue();
        File newDirectory = mapper.putIfAbsent(userId, false);
        LOGGER.log(Level.INFO,
                "Migrating user '" + userId + "' from 'users/" + oldDirectory.getName()
                        + "/' to 'users/" + newDirectory.getName() + "/'");
        Files.move(oldDirectory.toPath(), newDirectory.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
    mapper.save();
    LOGGER.fine("Completed migration of users to userId mapping.");
}
/**
 * Registers a successfully parsed header value under its header type.
 * A type can occur multiple times in a message, so values accumulate in a
 * per-type list rather than replacing each other.
 *
 * @param type  the header type the value was parsed for
 * @param value the parsed header to record
 */
protected void addParsedValue(UpnpHeader.Type type, UpnpHeader value) {
    if (log.isLoggable(Level.FINE)) {
        log.fine("Adding parsed header: " + value);
    }
    // computeIfAbsent replaces the manual get/null-check/put dance.
    parsedHeaders.computeIfAbsent(type, k -> new LinkedList<>()).add(value);
}
protected void setGroupStart(int bid, int captureGroupId, int curPosition) { if (captureGroupId >= 0) { Map<Integer,MatchedGroup> matchedGroups = getMatchedGroups(bid, true); MatchedGroup mg = matchedGroups.get(captureGroupId); if (mg != null) { // This is possible if we have patterns like "( ... )+" in which case multiple nodes can match as the subgroup // We will match the first occurrence and use that as the subgroup (Java uses the last match as the subgroup) logger.fine("Setting matchBegin=" + curPosition + ": Capture group " + captureGroupId + " already exists: " + mg); } matchedGroups.put(captureGroupId, new MatchedGroup(curPosition, -1, null)); } }
LOGGER.fine("Detaching triangle."); if (triangleIndexes.length != 3) { throw new IllegalArgumentException("Cannot detach triangle with that does not have 3 indexes!"); if (!edgeRemoved[i]) { indexes.findPath(indexesPairs[i][0], indexesPairs[i][1], path); if (path.size() == 0) { indexes.findPath(indexesPairs[i][1], indexesPairs[i][0], path); if (path.size() == 0) { throw new IllegalStateException("Triangulation failed. Cannot find path between two indexes. Please apply triangulation in Blender as a workaround."); if (detachedFaces.size() == 0 && path.size() < indexes.size()) { Integer[] indexesSublist = path.toArray(new Integer[path.size()]); detachedFaces.add(new Face(indexesSublist, smooth, materialNumber, meshHelper.selectUVSubset(this, indexesSublist), meshHelper.selectVertexColorSubset(this, indexesSublist), temporalMesh)); for (int j = 0; j < path.size() - 1; ++j) { indexes.removeEdge(path.get(j), path.get(j + 1));
LOGGER.fine("Triangulating face."); assert indexes.size() >= 3 : "Invalid indexes amount for face. 3 is the required minimum!"; triangulatedFaces = new ArrayList<IndexesLoop>(indexes.size() - 2); while (facesToTriangulate.size() > 0 && warning == TriangulationWarning.NONE) { Face face = facesToTriangulate.remove(0); triangulatedFaces.add(face.getIndexes().clone()); } else { int previousIndex1 = -1, previousIndex2 = -1, previousIndex3 = -1; triangulatedFaces.add(new IndexesLoop(indexes)); LOGGER.log(Level.WARNING, "Errors occured during face triangulation: {0}. The face will be triangulated with the most direct algorithm, but the results might not be identical to blender.", e.getLocalizedMessage()); warning = TriangulationWarning.UNKNOWN; indexes[1] = this.getIndex(i); indexes[2] = this.getIndex(i + 1); triangulatedFaces.add(new IndexesLoop(indexes));
/**
 * Discovers all local resources of the given device graph. Only meaningful
 * for root devices; non-root devices yield {@code null}.
 *
 * @param device the device whose resource tree should be discovered
 * @return the discovered resources, or {@code null} if the device is not a root device
 * @throws ValidationException if two resources claim the same local URI namespace
 */
public Resource[] getResources(Device device) throws ValidationException {
    if (!device.isRoot()) {
        return null;
    }
    log.fine("Discovering local resources of device graph");
    Set<Resource> resources = new HashSet<>();
    List<ValidationError> errors = new ArrayList<>();
    for (Resource resource : device.discoverResources(this)) {
        log.finer("Discovered: " + resource);
        // Set.add returns false on duplicates, which signals a URI namespace conflict.
        if (!resources.add(resource)) {
            log.finer("Local resource already exists, queueing validation error");
            errors.add(new ValidationError(
                    getClass(), "resources",
                    "Local URI namespace conflict between resources of device: " + resource
            ));
        }
    }
    if (!errors.isEmpty()) {
        throw new ValidationException("Validation of device graph failed, call getErrors() on exception", errors);
    }
    return resources.toArray(new Resource[resources.size()]);
}
loadedFeatures.objects.add(object); loadedFeatures.lights.add(((LightNode) object).getLight()); } else if (object instanceof CameraNode && ((CameraNode) object).getCamera() != null) { loadedFeatures.cameras.add(((CameraNode) object).getCamera()); LOGGER.fine("Only image textures can be loaded as unlinked assets. Generated textures will be applied to an existing object."); LOGGER.fine("Loading unlinked animations is not yet supported!"); break; default: LOGGER.log(Level.FINEST, "Ommiting the block: {0}.", block.getCode()); LOGGER.fine("Baking constraints after every feature is loaded."); ConstraintHelper constraintHelper = blenderContext.getHelper(ConstraintHelper.class); constraintHelper.bakeConstraints(blenderContext); LOGGER.fine("Loading scenes and attaching them to the root object."); for (FileBlockHeader sceneBlock : loadedFeatures.sceneBlocks) { loadedFeatures.scenes.add(this.toScene(sceneBlock.getStructure(blenderContext), blenderContext)); LOGGER.fine("Creating the root node of the model and applying loaded nodes of the scene and loaded features to it."); Node modelRoot = new Node(blenderKey.getName()); for (Node scene : loadedFeatures.scenes) { LOGGER.fine("Setting loaded content as user data in resulting sptaial."); Map<String, Map<String, Object>> linkedData = new HashMap<String, Map<String, Object>>();
Collection<? extends ActionMap> ams = result.allInstances(); if (err.isLoggable(Level.FINE)) { err.fine("changed maps : " + ams); // NOI18N err.fine("previous maps: " + actionMaps); // NOI18N if (ams.size() == actionMaps.size()) { boolean theSame = true; int i = 0; tempActionMaps.add(new WeakReference<ActionMap>(actionMap)); if (err.isLoggable(Level.FINE)) { err.fine("clearActionPerformers"); // NOI18N
if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Determining visibility of " + d + " in contexts of type " + contextClass); if (LOGGER.isLoggable(Level.FINER)) { LOGGER.finer("Querying " + f + " for visibility of " + d + " in type " + contextClass); if (LOGGER.isLoggable(Level.CONFIG)) { LOGGER.config("Filter " + f + " hides " + d + " in contexts of type " + contextClass); LOGGER.log(Level.WARNING, "Encountered error while processing filter " + f + " for contexts of type " + contextClass, e); throw e; } catch (Throwable e) { LOGGER.log(logLevelFor(f), "Uncaught exception from filter " + f + " for context of type " + contextClass, e); continue OUTER; // veto-ed. not shown r.add(d);
if (node == null) continue; // this computer is gone byName.put(node.getNodeName(),c); long start = System.currentTimeMillis(); updateComputer(s, byName, used, automaticSlaveLaunch); if (LOG_STARTUP_PERFORMANCE && LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Took %dms to update node %s", System.currentTimeMillis() - start, s.getNodeName()));
if (LOGGER.isLoggable(Level.FINE)) { String logMessage = "Resolved SRV RR for " + srvDomain + ":"; for (SRVRecord r : srvRecords) logMessage += " " + r; LOGGER.fine(logMessage); addresses.add(hostAddress);
/**
 * Logs, at FINE level, the feature weights of the given classifier: one line
 * per label, features sorted by weight.
 *
 * @param classifier the trained classifier whose weights are reported
 * @param classLabel optional tag printed before the report; may be {@code null}
 */
protected static void reportWeights(LinearClassifier<String, String> classifier, String classLabel) {
    if (classLabel != null) {
        logger.fine("CLASSIFIER WEIGHTS FOR LABEL " + classLabel);
    }
    Map<String, Counter<String>> labelsToFeatureWeights = classifier.weightsAsMapOfCounters();
    List<String> labels = new ArrayList<>(labelsToFeatureWeights.keySet());
    Collections.sort(labels);
    for (String label : labels) {
        Counter<String> featWeights = labelsToFeatureWeights.get(label);
        List<Pair<String, Double>> sorted = Counters.toSortedListWithCounts(featWeights);
        StringBuilder bos = new StringBuilder();
        bos.append("WEIGHTS FOR LABEL ").append(label).append(':');
        for (Pair<String, Double> feat : sorted) {
            // Keep everything in the builder; the original's feat.second()+"\n"
            // created a throwaway String per feature inside the append chain.
            bos.append(' ').append(feat.first()).append(':').append(feat.second()).append('\n');
        }
        logger.fine(bos.toString());
    }
}
private void scanFile(File log) { LOGGER.fine("Scanning "+log); try (Reader rawReader = new FileReader(log); BufferedReader r = new BufferedReader(rawReader)) { if (!findHeader(r)) return; // we should find a memory mapped file for secret.key String secretKey = getSecretKeyFile().getAbsolutePath(); String line; while ((line=r.readLine())!=null) { if (line.contains(secretKey)) { files.add(new HsErrPidFile(this,log)); return; } } } catch (IOException e) { // not a big enough deal. LOGGER.log(Level.FINE, "Failed to parse hs_err_pid file: " + log, e); } }
/**
 * Classifies each test datum and returns the predicted labels. For every
 * datum, the prediction justification is logged at FINE and an
 * agreement/disagreement line against the gold label at INFO.
 *
 * @param testDatums the data to classify
 * @return predicted labels, parallel to {@code testDatums}
 */
public List<String> annotateMulticlass(List<Datum<String, String>> testDatums) {
    List<String> predictedLabels = new ArrayList<>();
    for (Datum<String, String> testDatum : testDatums) {
        String label = classOf(testDatum, null);
        Counter<String> probs = probabilityOf(testDatum);
        double prob = probs.getCount(label);
        // Build and log the justification only when FINE is enabled. Previously
        // justificationOf() ran only under the guard but the "JUSTIFICATION"
        // message (and its string concatenation) was produced unconditionally,
        // so at FINE the logged justification body was always empty.
        if (logger.isLoggable(Level.FINE)) {
            StringWriter sw = new StringWriter();
            PrintWriter pw = new PrintWriter(sw);
            justificationOf(testDatum, pw, label);
            logger.fine("JUSTIFICATION for label GOLD:" + testDatum.label() + " SYS:" + label
                    + " (prob:" + prob + "):\n" + sw.toString() + "\nJustification done.");
        }
        predictedLabels.add(label);
        if (!testDatum.label().equals(label)) {
            logger.info("Classification: found different type " + label + " for relation: " + testDatum);
        } else {
            logger.info("Classification: found similar type " + label + " for relation: " + testDatum);
        }
    }
    return predictedLabels;
}
/** * Persists a list of installing plugins; this is used in the case Jenkins fails mid-installation and needs to be restarted * @param installingPlugins */ public static synchronized void persistInstallStatus(List<UpdateCenterJob> installingPlugins) { File installingPluginsFile = getInstallingPluginsFile(); if(installingPlugins == null || installingPlugins.isEmpty()) { installingPluginsFile.delete(); return; } LOGGER.fine("Writing install state to: " + installingPluginsFile.getAbsolutePath()); Map<String,String> statuses = new HashMap<String,String>(); for(UpdateCenterJob j : installingPlugins) { if(j instanceof InstallationJob && j.getCorrelationId() != null) { // only include install jobs with a correlation id (directly selected) InstallationJob ij = (InstallationJob)j; InstallationStatus status = ij.status; String statusText = status.getType(); if(status instanceof Installing) { // flag currently installing plugins as pending statusText = "Pending"; } statuses.put(ij.plugin.name, statusText); } } try { String installingPluginXml = new XStream().toXML(statuses); FileUtils.write(installingPluginsFile, installingPluginXml); } catch (IOException e) { LOGGER.log(SEVERE, "Failed to save " + installingPluginsFile.getAbsolutePath(), e); } }
if (LOG.isLoggable(Level.FINE)) { LOG.fine("Skipping empty inline tag of @response.param in method " + methodDoc.qualifiedName() + ": " + tagName); break; default: LOG.warning("Unknown inline tag of @response.param in method " + methodDoc.qualifiedName() + ": " + tagName + " (value: " + tagText + ")"); responseDoc.getWadlParams().add(wadlParam); representationDoc.setDoc(tag.text()); } else { LOG.warning("Unknown response representation tag " + tag.name()); responseDoc.getRepresentations().add(representationDoc);
/**
 * Creates a local GENA subscription, snapshotting the evented state of the
 * service at subscription time so later moderation can compare against the
 * values last sent to the subscriber.
 *
 * @param service                  the local service being subscribed to
 * @param requestedDurationSeconds requested subscription duration
 * @param callbackURLs             subscriber callback URLs
 * @throws Exception if the service's current state cannot be read
 */
public LocalGENASubscription(LocalService service, Integer requestedDurationSeconds, List<URL> callbackURLs) throws Exception {
    super(service);
    setSubscriptionDuration(requestedDurationSeconds);

    log.fine("Reading initial state of local service at subscription time");
    long currentTime = new Date().getTime();
    this.currentValues.clear();

    Collection<StateVariableValue> values = getService().getManager().getCurrentState();

    log.finer("Got evented state variable values: " + values.size());

    for (StateVariableValue value : values) {
        this.currentValues.put(value.getStateVariable().getName(), value);

        // Fixed guard level: the message is logged at FINER but was guarded by
        // isLoggable(FINEST), so it was silently dropped at FINER verbosity.
        if (log.isLoggable(Level.FINER)) {
            log.finer("Read state variable value '" + value.getStateVariable().getName()
                    + "': " + value.toString());
        }

        // Preserve "last sent" state for future moderation
        lastSentTimestamp.put(value.getStateVariable().getName(), currentTime);
        if (value.getStateVariable().isModeratedNumericType()) {
            lastSentNumericValue.put(value.getStateVariable().getName(), Long.valueOf(value.toString()));
        }
    }

    this.subscriptionId = SubscriptionIdHeader.PREFIX + UUID.randomUUID();
    this.currentSequence = new UnsignedIntegerFourBytes(0);
    this.callbackURLs = callbackURLs;
}
void maintain() { if (getDeviceItems().isEmpty()) return; // Remove expired remote devices Map<UDN, RemoteDevice> expiredRemoteDevices = new HashMap<>(); for (RegistryItem<UDN, RemoteDevice> remoteItem : getDeviceItems()) { if (log.isLoggable(Level.FINEST)) log.finest("Device '" + remoteItem.getItem() + "' expires in seconds: " + remoteItem.getExpirationDetails().getSecondsUntilExpiration()); if (remoteItem.getExpirationDetails().hasExpired(false)) { expiredRemoteDevices.put(remoteItem.getKey(), remoteItem.getItem()); } } for (RemoteDevice remoteDevice : expiredRemoteDevices.values()) { if (log.isLoggable(Level.FINE)) log.fine("Removing expired: " + remoteDevice); remove(remoteDevice); } // Renew outgoing subscriptions Set<RemoteGENASubscription> expiredOutgoingSubscriptions = new HashSet<>(); for (RegistryItem<String, RemoteGENASubscription> item : getSubscriptionItems()) { if (item.getExpirationDetails().hasExpired(true)) { expiredOutgoingSubscriptions.add(item.getItem()); } } for (RemoteGENASubscription subscription : expiredOutgoingSubscriptions) { if (log.isLoggable(Level.FINEST)) log.fine("Renewing outgoing subscription: " + subscription); renewOutgoingSubscription(subscription); } }