Usage examples for org.apache.commons.logging.Log#info(Object)
void info(Object message);
From source file:hotbeans.support.FileSystemHotBeanModuleRepository.java
/** * Internal method to update a module.//from w ww. j a v a2 s. co m */ protected HotBeanModuleInfo updateModuleInternal(String moduleName, final InputStream moduleFileStream, final boolean add) { long revisionNumber = -1; HotBeanModuleInfo hotBeanModuleInfo = null; Log logger = this.getLog(); synchronized (super.getLock()) { // If update - module name must be specified if (!add && ((moduleName == null) || (moduleName.trim().length() == 0))) throw new HotBeansException("Module name not specified!"); RepositoryFileLock fileLock = null; File moduleTempFile = null; InputStream moduleTempFileStream = null; try { // Save module file to temp file moduleTempFile = File.createTempFile("hotBeanModule", ".jar"); FileCopyUtils.copy(moduleFileStream, new FileOutputStream(moduleTempFile)); // Get name from mainfest Manifest manifest = ModuleManifestUtils.readManifest(moduleTempFile); String jarFileModuleName = ModuleManifestUtils.getName(manifest); if (logger.isDebugEnabled()) logger.debug("Module name in module manifest: '" + jarFileModuleName + "'."); // Validate name if (add) { if ((jarFileModuleName == null) || (jarFileModuleName.trim().length() == 0)) throw new InvalidModuleNameException("Module name not specified!"); else if (super.getHotBeanModule(jarFileModuleName) != null) throw new ModuleAlreadyExistsException("Module name already exists!"); } else if (!moduleName.equals(jarFileModuleName)) throw new InvalidModuleNameException( "Module name in jar file doesn't match specified module name!"); moduleName = jarFileModuleName; moduleTempFileStream = new FileInputStream(moduleTempFile); if (add & logger.isInfoEnabled()) logger.info("Adding module '" + moduleName + "'."); fileLock = this.obtainRepositoryFileLock(false); // Obtain lock File moduleDirectory = new File(this.moduleRepositoryDirectory, moduleName); if (!moduleDirectory.exists()) moduleDirectory.mkdirs(); // Get next revision number revisionNumber = this.getLastRevisionOnFileSystem(moduleName); if 
(logger.isDebugEnabled()) { if (add) logger.debug("Adding module - last revision on file system: " + revisionNumber + "."); else logger.debug("Updating module - last revision on file system: " + revisionNumber + "."); } if (revisionNumber < 0) revisionNumber = 0; File moduleFile = new File(moduleDirectory, revisionNumber + MODULE_FILE_SUFFIX); while (moduleFile.exists()) // This should't really be necessary, but still... { revisionNumber++; moduleFile = new File(moduleDirectory, revisionNumber + MODULE_FILE_SUFFIX); } if (logger.isDebugEnabled()) { if (add) logger.debug("Adding module - revision of new module: " + revisionNumber + "."); else logger.debug("Updating module - revision of new module: " + revisionNumber + "."); } // Save module file FileCopyUtils.copy(moduleTempFileStream, new FileOutputStream(moduleFile)); // Deploy at once hotBeanModuleInfo = this.loadModule(moduleName, revisionNumber); } catch (Exception e) { String moduleNameString = ""; if (moduleName != null) moduleNameString = "'" + moduleName + "' "; if (add) { logger.error("Error adding module " + moduleNameString + "- " + e, e); if (e instanceof HotBeansException) throw (HotBeansException) e; else throw new HotBeansException("Error adding module " + moduleNameString + "- " + e, e); } else { logger.error("Error updating module " + moduleNameString + "- " + e, e); if (e instanceof HotBeansException) throw (HotBeansException) e; else throw new HotBeansException("Error updating module " + moduleNameString + "- " + e, e); } } finally { this.releaseRepositoryFileLock(fileLock); fileLock = null; if (moduleTempFileStream != null) { // Delete temp file try { moduleTempFileStream.close(); } catch (Exception e) { } } if (moduleTempFile != null) FileDeletor.delete(moduleTempFile); } } return hotBeanModuleInfo; }
From source file:com.espertech.esper.epl.fafquery.FireAndForgetQueryExec.java
/**
 * Executes a fire-and-forget (on-demand) snapshot query against a named window or table.
 *
 * <p>With no filter (or no filter parameters) this delegates to the virtual data window if one
 * exists, otherwise returns null to indicate a full table scan. Otherwise it classifies the
 * filter parameters into equality keys and range constraints, picks a matching index (or asks
 * the virtual data window / index hints), optionally logs the query plan, and performs either
 * a single indexed lookup or — when an IN-clause produced multiple key values — one lookup per
 * key combination.</p>
 *
 * @return matching events, or null to indicate that a table scan is required.
 */
public static Collection<EventBean> snapshot(FilterSpecCompiled optionalFilter, Annotation[] annotations,
        VirtualDWView virtualDataWindow, EventTableIndexRepository indexRepository, boolean queryPlanLogging,
        Log queryPlanLogDestination, String objectName, AgentInstanceContext agentInstanceContext) {
    if (optionalFilter == null || optionalFilter.getParameters().length == 0) {
        if (virtualDataWindow != null) {
            Pair<IndexMultiKey, EventTable> pair = virtualDataWindow
                    .getFireAndForgetDesc(Collections.<String>emptySet(), Collections.<String>emptySet());
            return virtualDataWindow.getFireAndForgetData(pair.getSecond(), new Object[0],
                    new RangeIndexLookupValue[0], annotations);
        }
        return null;
    }

    // Determine what straight-equals keys and which ranges are available.
    // Widening/Coercion is part of filter spec compile.
    Set<String> keysAvailable = new HashSet<String>();
    Set<String> rangesAvailable = new HashSet<String>();
    if (optionalFilter.getParameters().length == 1) {
        for (FilterSpecParam param : optionalFilter.getParameters()[0]) {
            if (!(param instanceof FilterSpecParamConstant || param instanceof FilterSpecParamRange
                    || param instanceof FilterSpecParamIn)) {
                continue;
            }
            if (param.getFilterOperator() == FilterOperator.EQUAL
                    || param.getFilterOperator() == FilterOperator.IS
                    || param.getFilterOperator() == FilterOperator.IN_LIST_OF_VALUES) {
                keysAvailable.add(param.getLookupable().getExpression());
            } else if (param.getFilterOperator().isRangeOperator()
                    || param.getFilterOperator().isInvertedRangeOperator()
                    || param.getFilterOperator().isComparisonOperator()) {
                rangesAvailable.add(param.getLookupable().getExpression());
            }
            // FIX: removed a dead 'else if (param.getFilterOperator().isRangeOperator())'
            // branch — unreachable, since the condition above already tests isRangeOperator().
        }
    }

    // Find an index that matches the needs.
    Pair<IndexMultiKey, EventTableAndNamePair> tablePair;
    if (virtualDataWindow != null) {
        Pair<IndexMultiKey, EventTable> tablePairNoName = virtualDataWindow.getFireAndForgetDesc(keysAvailable,
                rangesAvailable);
        tablePair = new Pair<IndexMultiKey, EventTableAndNamePair>(tablePairNoName.getFirst(),
                new EventTableAndNamePair(tablePairNoName.getSecond(), null));
    } else {
        IndexHint indexHint = IndexHint.getIndexHint(annotations);
        List<IndexHintInstruction> optionalIndexHintInstructions = null;
        if (indexHint != null) {
            optionalIndexHintInstructions = indexHint.getInstructionsFireAndForget();
        }
        tablePair = indexRepository.findTable(keysAvailable, rangesAvailable, optionalIndexHintInstructions);
    }

    // Query-plan logging and hook notification.
    QueryPlanIndexHook hook = QueryPlanIndexHookUtil.getHook(annotations);
    if (queryPlanLogging && (queryPlanLogDestination.isInfoEnabled() || hook != null)) {
        String prefix = "Fire-and-forget from " + objectName + " ";
        String indexName = tablePair != null && tablePair.getSecond() != null
                ? tablePair.getSecond().getIndexName()
                : null;
        String indexText = indexName != null ? "index " + indexName + " " : "full table scan ";
        indexText += "(snapshot only, for join see separate query plan)";
        if (tablePair == null) {
            queryPlanLogDestination.info(prefix + indexText);
        } else {
            queryPlanLogDestination.info(prefix + indexText + tablePair.getSecond().getEventTable().toQueryPlan());
        }
        if (hook != null) {
            hook.fireAndForget(new QueryPlanIndexDescFAF(new IndexNameAndDescPair[] { new IndexNameAndDescPair(
                    indexName,
                    tablePair != null ? tablePair.getSecond().getEventTable().getClass().getSimpleName()
                            : null) }));
        }
    }

    if (tablePair == null) {
        return null; // indicates table scan
    }

    // Compile key sets which contain key index lookup values.
    String[] keyIndexProps = IndexedPropDesc.getIndexProperties(tablePair.getFirst().getHashIndexedProps());
    boolean hasKeyWithInClause = false;
    Object[] keyValues = new Object[keyIndexProps.length];
    for (int keyIndex = 0; keyIndex < keyIndexProps.length; keyIndex++) {
        for (FilterSpecParam param : optionalFilter.getParameters()[0]) {
            if (param.getLookupable().getExpression().equals(keyIndexProps[keyIndex])) {
                if (param.getFilterOperator() == FilterOperator.IN_LIST_OF_VALUES) {
                    Object[] keyValuesList = ((MultiKeyUntyped) param.getFilterValue(null, agentInstanceContext))
                            .getKeys();
                    if (keyValuesList.length == 0) {
                        // empty IN-list: leave the key slot null
                        continue;
                    } else if (keyValuesList.length == 1) {
                        keyValues[keyIndex] = keyValuesList[0];
                    } else {
                        keyValues[keyIndex] = keyValuesList;
                        hasKeyWithInClause = true;
                    }
                } else {
                    keyValues[keyIndex] = param.getFilterValue(null, agentInstanceContext);
                }
                break;
            }
        }
    }

    // Analyze ranges - these may include key lookup value (EQUALS semantics).
    String[] rangeIndexProps = IndexedPropDesc.getIndexProperties(tablePair.getFirst().getRangeIndexedProps());
    RangeIndexLookupValue[] rangeValues;
    if (rangeIndexProps.length > 0) {
        rangeValues = compileRangeLookupValues(rangeIndexProps, optionalFilter.getParameters()[0],
                agentInstanceContext);
    } else {
        rangeValues = new RangeIndexLookupValue[0];
    }

    EventTable eventTable = tablePair.getSecond().getEventTable();
    IndexMultiKey indexMultiKey = tablePair.getFirst();

    // Table lookup without in-clause: single lookup.
    if (!hasKeyWithInClause) {
        return fafTableLookup(virtualDataWindow, indexMultiKey, eventTable, keyValues, rangeValues, annotations);
    }

    // Table lookup with in-clause: determine combinations.
    Object[][] combinations = new Object[keyIndexProps.length][];
    for (int i = 0; i < keyValues.length; i++) {
        if (keyValues[i] instanceof Object[]) {
            combinations[i] = (Object[]) keyValues[i];
        } else {
            combinations[i] = new Object[] { keyValues[i] };
        }
    }

    // Enumerate combinations, collecting into a set to de-duplicate events.
    CombinationEnumeration enumeration = new CombinationEnumeration(combinations);
    HashSet<EventBean> events = new HashSet<EventBean>();
    while (enumeration.hasMoreElements()) {
        Object[] keys = enumeration.nextElement();
        Collection<EventBean> result = fafTableLookup(virtualDataWindow, indexMultiKey, eventTable, keys,
                rangeValues, annotations);
        events.addAll(result);
    }
    return events;
}
From source file:com.github.rosjava.huric.my_pub_sub_tutorial.Listener.java
/**
 * ROS node entry point: loads an OWL ontology with Jena, demonstrates asserted vs. inferred
 * class membership for a sample individual, runs a consistency check (writing the inferred
 * model to disk when valid), and finally subscribes to the "chatter" topic, logging each
 * received message.
 *
 * NOTE(review): JENA is a hard-coded absolute path to a local workspace — this will only work
 * on that machine; consider making it configurable.
 */
@Override
public void onStart(ConnectedNode connectedNode) {
    final Log log = connectedNode.getLog();
    // Local workspace paths and ontology identifiers.
    final String JENA = "/home/userk/catkin_ws/rosjava/src/huric/my_pub_sub_tutorial/src/main/java/com/github/huric/my_pub_sub_tutorial/";
    final String SOURCE = "http://www.userk.co.uk/ontologies/ontology.owl";
    final String NS = SOURCE + "#";
    final String fileName = "ontology_modified.owl";
    final String absoluteFileName = JENA + fileName;

    // Base (assertions-only) model; redirect the ontology URI to the local copy so no
    // network fetch is needed.
    OntModel base = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
    OntDocumentManager dm = base.getDocumentManager();
    dm.addAltEntry(SOURCE, "file:" + JENA + "ontology.owl");
    base.read(SOURCE, "RDF/XML");

    // Create an inference ontology model layered over the base model.
    OntModel inf = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM_MICRO_RULE_INF, base);

    // create a dummy prof for this example
    OntClass lecturerClass = base.getOntClass(NS + "Lecturer");
    System.out.println("\n\n---- Assertions in the data ----\n\n");

    // list the asserted types
    Individual p1 = base.createIndividual(NS + "lec_ind", lecturerClass);
    for (Iterator<Resource> i = p1.listRDFTypes(false); i.hasNext();) {
        System.out.println(p1.getURI() + " is asserted in class " + i.next());
    }

    System.out.println("\n\n---- Inferred Assertions ----\n\n");
    // list the inferred types (same individual, fetched through the inference model)
    p1 = inf.getIndividual(NS + "lec_ind");
    for (Iterator<Resource> i = p1.listRDFTypes(false); i.hasNext();) {
        System.out.println(p1.getURI() + " IS A " + i.next());
    }

    System.out.println("\n\n---- List all subclasses of Lecturer Class ----\n\n");
    for (Iterator<OntClass> i = lecturerClass.listSubClasses(); i.hasNext();) {
        OntClass c = i.next();
        System.out.println(c.getURI());
    }

    System.out.println("\n\n---- List all Instances of Lecturer ----\n\n");
    lecturerClass = inf.getOntClass(NS + "Lecturer");
    for (ExtendedIterator<? extends OntResource> i = lecturerClass.listInstances(); i.hasNext();) {
        System.out.println(i.next());
    }

    System.out.println("\n\n---- Consistency Check ----\n\n");
    FileWriter out = null;
    ValidityReport validity = inf.validate();
    if (validity.isValid()) {
        System.out.println("Consistency Check:\n Passed\n");
        System.out.println("Writing to file:\n\t" + fileName);
        // Persist the inferred model as RDF/XML; the writer is closed in the finally block.
        try {
            out = new FileWriter(absoluteFileName);
            inf.write(out, "RDF/XML");
        } catch (IOException a) {
            System.out.println(" Occhio");
        } finally {
            if (out != null) {
                try {
                    out.close();
                } catch (IOException ex) {
                    // best-effort close; nothing sensible to do here
                }
            }
        }
    } else {
        // Report each validity conflict.
        System.out.println("Consistency Check:\n Conflicts\n");
        for (Iterator i = validity.getReports(); i.hasNext();) {
            System.out.println(" - " + i.next());
        }
    }
    System.out.println("\n\n---- [ The End ] ----\n\n");

    // Subscribe to the chatter topic; the listener runs for the lifetime of the node.
    Subscriber<std_msgs.String> subscriber = connectedNode.newSubscriber("chatter", std_msgs.String._TYPE);
    subscriber.addMessageListener(new MessageListener<std_msgs.String>() {
        @Override
        public void onNewMessage(std_msgs.String message) {
            log.info("I heard: \"" + message.getData() + "\"");
        }
    });
}
From source file:de.tudarmstadt.ukp.clarin.webanno.crowdflower.NamedEntityTaskManager.java
/**
 * Uploads a new task2 to Crowdflower, producing all data entirely of the raw judgments file
 * retrieved from a task1 ID.
 *
 * @param template the template.
 * @param jobID1 the job ID.
 * @param documentsJCas the documents.
 * @param goldsJCas the gold documents.
 * @return Crowdflower ID as string of the new task
 * @throws JsonProcessingException if task2 upload data cannot be serialized to JSON.
 * @throws IOException if the raw judgments for the task1 job cannot be read.
 * @throws CrowdException if communication with the Crowdflower service fails.
 */
public String uploadNewNERTask2(String template, String jobID1, List<JCas> documentsJCas, List<JCas> goldsJCas)
        throws JsonProcessingException, IOException, CrowdException {
    Log LOG = LogFactory.getLog(getClass());
    omittedSentences = 0;

    // Reader that also downloades the raw judgments for the supplied job id
    BufferedReader br = getReaderForRawJudgments(jobID1);
    String line;

    // JSON object mapper
    ObjectMapper mapper = new ObjectMapper();
    ObjectWriter writer = mapper.writer();

    // Used to represent task2 data that is send as JSON to crowdflower
    Vector<NamedEntityTask2Data> uploadData = new Vector<NamedEntityTask2Data>();

    // Judgments come in as a quite exotic multiline-json, we need to parse every line of it
    // separately
    while ((line = br.readLine()) != null) {
        // try to process each line, omit data if an error occurs (but inform user)
        try {
            JsonNode elem = mapper.readTree(line);
            String text = elem.path(JSON_FIELD_DATA).path(NamedEntityTask1Data.FIELD_TEXT).getTextValue();
            String state = elem.path(JSON_FIELD_STATE).getTextValue();

            // omit hidden gold items
            if (state.equals(JSON_VALUE_HIDDEN_GOLD)) {
                continue;
            }

            String document = elem.path(JSON_FIELD_DATA).path(JSON_FIELD_DOCUMENT).getTextValue();
            int offset = elem.path(JSON_FIELD_DATA).path(NamedEntityTask1Data.FIELD_OFFSET).getIntValue();

            if (state.equals(JSON_VALUE_GOLDEN)) {
                // produce gold data: one task2 gold item per marker, paired positionally with
                // its NE type from the types array.
                String markertext_gold = elem.path(JSON_FIELD_DATA)
                        .path(NamedEntityTask1Data.FIELD_MARKERTEXT_GOLD).getTextValue();
                String types = elem.path(JSON_FIELD_DATA).path(NamedEntityTask1Data.FIELD_TYPES).getTextValue();
                if (!types.equals(JSON_VALUE_EMPTY_ARRAY)) {
                    // sentence has atleast one NE
                    // NOTE(review): this naive substring/split parse assumes types contains no
                    // nested commas — TODO confirm against the actual task1 output format.
                    List<String> NEtypes = Arrays.asList(types.substring(1, types.length() - 1).split(","));
                    JsonNode markers = mapper.readTree(markertext_gold);
                    if (NEtypes.size() != markers.size()) {
                        LOG.warn(
                                "Warning, skipping ill formated gold item in task1! (NEtypes.size() != markers.size())");
                        continue;
                    }
                    int i = 0;
                    for (JsonNode marker : markers) {
                        int start = marker.path(JSON_FIELD_START_MARKER).getIntValue();
                        int end = marker.path(JSON_FIELD_END_MARKER).getIntValue();
                        NamedEntityTask2Data task2_gold_datum = new NamedEntityTask2Data(text,
                                extractSpan(text, start, end), writer.writeValueAsString(marker),
                                String.valueOf(getFirstSpanOffset(text)), document,
                                task2NeMap.get(NEtypes.get(i)), bogusNER2Reason);
                        task2_gold_datum.setDocOffset(offset);
                        uploadData.add(task2_gold_datum);
                        i++;
                    }
                }
                // else ignore this sentence
            } else // normal data entry
            {
                if (!elem.path(JSON_FIELD_RESULTS).path(JSON_FIELD_JUDGMENTS).isMissingNode()) {
                    // Count votes per serialized marker across all judgments for this sentence.
                    Map<String, Integer> votings = new HashMap<String, Integer>();
                    // Majority voting for each marker in all judgments
                    for (JsonNode judgment : elem.path(JSON_FIELD_RESULTS).path(JSON_FIELD_JUDGMENTS)) {
                        if (!judgment.path(JSON_FIELD_DATA).path(NamedEntityTask1Data.FIELD_MARKERTEXT)
                                .isMissingNode()) {
                            String markertext = judgment.path(JSON_FIELD_DATA)
                                    .path(NamedEntityTask1Data.FIELD_MARKERTEXT).getTextValue();
                            JsonNode markers = mapper.readTree(markertext);
                            // iterate over votes
                            for (JsonNode marker : markers) {
                                String voteText = writer.writeValueAsString(marker);
                                // first case: add entry for this voting position
                                if (!votings.containsKey(voteText)) {
                                    votings.put(voteText, 1);
                                }
                                // second case: increment voting
                                else {
                                    votings.put(voteText, votings.get(voteText) + 1);
                                }
                            }
                        } else {
                            LOG.warn(
                                    "Warning, missing path in JSON result file from crowdflower: results/judgments");
                        }
                    }
                    // Consider any marked span which has at least two votes. Bogus spans can
                    // still be filtered out by task2
                    int votes_needed = 2;
                    List<String> majorityMarkers = new ArrayList<String>();
                    for (String vote : votings.keySet()) {
                        if (votings.get(vote) >= votes_needed) {
                            majorityMarkers.add(vote);
                        }
                    }
                    // process majority markers, skipping the sentinel "no marker" values
                    for (String strMarker : majorityMarkers) {
                        if (!strMarker.equals(JSON_VALUE_NONE1) && !strMarker.equals(JSON_VALUE_NONE2)) {
                            JsonNode marker = mapper.readTree(strMarker);
                            int start = marker.path(JSON_FIELD_START_MARKER).getIntValue();
                            int end = marker.path(JSON_FIELD_END_MARKER).getIntValue();
                            NamedEntityTask2Data task2_datum = new NamedEntityTask2Data(text,
                                    extractSpan(text, start, end), strMarker,
                                    String.valueOf(getFirstSpanOffset(text)), document);
                            task2_datum.setDocOffset(offset);
                            uploadData.add(task2_datum);
                        }
                    }
                } else {
                    LOG.warn("Warning, missing path in JSON result file from crowdflower: data/markertext");
                }
            }
        } catch (Exception e) {
            // Best-effort per line: count and report, then continue with the next judgment.
            omittedSentences++;
            LOG.warn("Warning, omitted a sentence from task2 upload because of an error in processing it: "
                    + e.getMessage());
        }
    }

    LOG.info("Data generation complete. Creating new Job for Ner task 2.");
    CrowdJob job = createJob(template);
    setAllowedCountries(job);
    crowdclient.updateAllowedCountries(job);
    LOG.info("Done, new job id is: " + job.getId() + ". Now generating data for NER task 2");
    crowdclient.uploadData(job, uploadData);
    LOG.info("Done uploading data to task2 #" + job.getId() + ".");
    return job.getId();
}
From source file:byps.test.TestUtils.java
@SuppressWarnings("rawtypes") public static void internalAssertEquals(Log log, String msg, Object a, Object b, Set<Object> alreadyCheckedObjs) { if (a != null) { if (alreadyCheckedObjs.contains(a)) return; alreadyCheckedObjs.add(a);/*from w w w.j a va2s . c om*/ } if (b instanceof Date) { if (a == null) { a = new Date(0); } } try { if (a != b) { if (a == null && b != null) { if (!(b instanceof String)) { throw new AssertionError(msg + " a is null <> b is {" + b + "}"); } } else if (a != null && b == null) { if (!(a instanceof String)) { throw new AssertionError(msg + " a is {" + a + "} <> b is null"); } } else if (a instanceof boolean[]) { boolean[] x = (boolean[]) a; boolean[] y = (boolean[]) b; if (x.length != y.length) { throw new AssertionError(msg + " a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof byte[]) { byte[] x = (byte[]) a; byte[] y = (byte[]) b; if (x.length != y.length) { throw new AssertionError(msg + " a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof char[]) { char[] x = (char[]) a; char[] y = (char[]) b; if (x.length != y.length) { throw new AssertionError(msg + " a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof short[]) { short[] x = (short[]) a; short[] y = (short[]) b; if (x.length != y.length) { throw new AssertionError(msg + " a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof int[]) { int[] x = (int[]) a; int[] y = (int[]) b; if (x.length != y.length) { throw new AssertionError(msg + "a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { 
internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof long[]) { long[] x = (long[]) a; long[] y = (long[]) b; if (x.length != y.length) { throw new AssertionError(msg + "a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof float[]) { float[] x = (float[]) a; float[] y = (float[]) b; if (x.length != y.length) { throw new AssertionError(msg + "a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof double[]) { double[] x = (double[]) a; double[] y = (double[]) b; if (x.length != y.length) { throw new AssertionError(msg + "a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof String[]) { String[] x = (String[]) a; String[] y = (String[]) b; if (x.length != y.length) { throw new AssertionError(msg + " a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof Object[]) { Object[] x = (Object[]) a; Object[] y = (Object[]) b; if (x.length != y.length) { throw new AssertionError(msg + "a[].length != b[].length"); } for (int i = 0; i < x.length; i++) { internalAssertEquals(log, msg + "[" + i + "]", x[i], y[i], alreadyCheckedObjs); } } else if (a instanceof List) { List x = (List) a; List y = (List) b; if (x.size() != y.size()) { throw new AssertionError(msg + "a[].length != b[].length"); } for (int i = 0; i < x.size(); i++) { internalAssertEquals(log, msg + "[" + i + "]", x.get(i), y.get(i), alreadyCheckedObjs); } } else if (a instanceof Set) { Set x = (Set) a; Set y = (Set) b; if (x.size() != y.size()) { throw new AssertionError(msg + "a[].length != 
b[].length"); } for (Iterator i = x.iterator(); i.hasNext();) { Object v = i.next(); if (y.contains(v)) continue; if (v.getClass() == PrimitiveTypes.class) { boolean found = false; for (Iterator j = y.iterator(); j.hasNext();) { found = compare((PrimitiveTypes) v, (PrimitiveTypes) j.next()); if (found) break; } Assert.assertTrue(msg + "[" + v + "] is missing", found); continue; } if (v.getClass() == Actor.class) { boolean found = false; for (Iterator j = y.iterator(); j.hasNext();) { found = compare((Actor) v, (Actor) j.next()); if (found) break; } Assert.assertTrue(msg + "[" + v + "] is missing", found); continue; } Assert.assertTrue(msg + "[" + v + "] is missing", y.contains(v)); } } else if (a instanceof Map) { Map x = (Map) a; Map y = (Map) b; if (x.size() != y.size()) { throw new AssertionError(msg + " a[].length != b[].length"); } for (Iterator i = x.keySet().iterator(); i.hasNext();) { Object k = i.next(); internalAssertEquals(log, msg + "[" + k + "]", x.get(k), y.get(k), alreadyCheckedObjs); } } else if (a instanceof InputStream) { // geht nicht, stream a wurde in BWire.putStream geschlossen } else if (a instanceof ByteBuffer) { ByteBuffer x = (ByteBuffer) a; ByteBuffer y = (ByteBuffer) b; Assert.assertEquals("ByteBuffer.remaining", x.remaining(), y.remaining()); for (int i = 0; i < x.remaining(); i++) { Assert.assertEquals("ByteBuffer.get(" + i + ")", x.get(), y.get()); } } else { Class<?> classA = a.getClass(); Class<?> classB = b.getClass(); if (classA != classB) { throw new AssertionError("different class"); } try { Class<?> clazz = classA; if (clazz == Boolean.class || clazz == Byte.class || clazz == Character.class || clazz == Short.class || clazz == Integer.class || clazz == Long.class || clazz == Float.class || clazz == Double.class || clazz == Date.class || clazz == String.class) { Assert.assertEquals(msg, a, b); } else if (clazz == Date.class) { assertEqualDates(log, msg, (Date) a, (Date) b); } else if (clazz == PrimitiveTypes.class) { if 
(!compare((PrimitiveTypes) a, (PrimitiveTypes) b)) { throw new AssertionError("different objects"); } } else if (clazz == Actor.class) { if (!compare((Actor) a, (Actor) b)) { throw new AssertionError("different objects"); } } else { for (Field f : clazz.getDeclaredFields()) { f.setAccessible(true); internalAssertEquals(log, msg + "." + f.getName(), f.get(a), f.get(b), alreadyCheckedObjs); } } } catch (IllegalAccessException e) { throw new AssertionError(e); } } } String astr = a != null ? a.toString() : null; String bstr = b != null ? b.toString() : null; if (astr != null && astr.length() > 100) astr = astr.substring(0, 100); if (bstr != null && bstr.length() > 100) bstr = bstr.substring(0, 100); if (log != null) log.info("assertEquals: " + msg + ", a=" + astr + ", b=" + bstr + ", true"); } catch (AssertionError e) { log.error("assertEquals: " + msg + ", a=" + a + ", b=" + b + ", false", e); throw e; } }
From source file:ome.logic.AdminImpl.java
@RolesAllowed("system") @Transactional(readOnly = false)/*from ww w .j a v a2s . c om*/ public void synchronizeLoginCache() { final Log log = getBeanHelper().getLogger(); final List<Map<String, Object>> dnIds = ldapUtil.lookupLdapAuthExperimenters(); if (dnIds.size() > 0) { log.info("Synchronizing " + dnIds.size() + " ldap user(s)"); } for (Map<String, Object> dnId : dnIds) { String dn = (String) dnId.get("dn"); Long id = (Long) dnId.get("experimenter_id"); try { Experimenter e = userProxy(id); ldapUtil.synchronizeLdapUser(e.getOmeName()); } catch (ApiUsageException aue) { // User likely doesn't exist log.debug("User not found: " + dn); } catch (Exception e) { log.error("synchronizeLdapUser:" + dnId, e); } } context.publishEvent(new UserGroupUpdateEvent(this)); }
From source file:ome.logic.AdminImpl.java
/**
 * Applies a permission change to an experimenter group and to the rows of every table owned
 * by that group.
 *
 * <p>Validates that no read right (user/group/world) is being removed, then converts the new
 * permissions to their internal long form and pushes them to each per-class table and to the
 * group row itself. A no-op change is logged at debug level and skipped.</p>
 *
 * @param id the group id; must not be null.
 * @param newPerms the new permissions; must not be null.
 * @throws ApiUsageException if either argument is null.
 * @throws GroupSecurityViolation if a read right would be revoked.
 */
private void handleGroupChange(Long id, Permissions newPerms) {
    if (id == null) {
        throw new ApiUsageException("ID cannot be null");
    }
    if (newPerms == null) {
        throw new ApiUsageException("PERMS cannot be null");
    }

    final Session session = osf.getSession();
    final ExperimenterGroup group = (ExperimenterGroup) session.get(ExperimenterGroup.class, id);
    final Permissions oldPerms = group.getDetails().getPermissions();

    if (oldPerms.sameRights(newPerms)) {
        getBeanHelper().getLogger().debug(String.format("Ignoring unchanged permissions: %s", newPerms));
        return;
    }

    // Read rights may never be revoked, and group/world read may not be narrowed.
    final Right read = Right.READ;
    if (!newPerms.isGranted(Role.USER, read)) {
        throw new GroupSecurityViolation("Cannot remove user read: " + group);
    }
    if (oldPerms.isGranted(Role.GROUP, read) && !newPerms.isGranted(Role.GROUP, read)) {
        throw new GroupSecurityViolation("Cannot remove group read: " + group);
    }
    if (oldPerms.isGranted(Role.WORLD, read) && !newPerms.isGranted(Role.WORLD, read)) {
        throw new GroupSecurityViolation("Cannot remove world read: " + group);
    }

    final Long internal = (Long) Utils.internalForm(newPerms);
    final Log log = getBeanHelper().getLogger();

    // Update row-level permissions in every table that carries group ownership.
    for (final String className : classes()) {
        final String table = table(className);
        if (table == null) {
            continue;
        }
        final int changed = sql.changeTablePermissionsForGroup(table, id, internal);
        if (changed > 0) {
            log.info(String.format("# of perms changed for %s: %s", className, changed));
        }
    }

    sql.changeGroupPermissions(id, internal);
    log.info(String.format("Changed permissions for %s to %s", id, internal));
}
From source file:ome.logic.LdapImpl.java
/** * The ids in "minus" will be removed from the ids in "base" and then * the operation chosen by "add" will be run on them. This method * ignores all methods known by Roles.//from w ww. j av a 2 s . c om * * @param e * @param base * @param minus * @param add */ private void modifyGroups(Experimenter e, Collection<Long> base, Collection<Long> minus, boolean add) { final Log log = getBeanHelper().getLogger(); Set<Long> ids = new HashSet<Long>(base); ids.removeAll(minus); // Take no actions on system/user group. ids.remove(roles.getSystemGroupId()); ids.remove(roles.getUserGroupId()); if (ids.size() > 0) { log.info(String.format("%s groups for %s: %s", add ? "Adding" : "Removing", e.getOmeName(), ids)); Set<ExperimenterGroup> grps = new HashSet<ExperimenterGroup>(); for (Long id : ids) { grps.add(new ExperimenterGroup(id, false)); } if (add) { provider.addGroups(e, grps.toArray(new ExperimenterGroup[0])); } else { provider.removeGroups(e, grps.toArray(new ExperimenterGroup[0])); } if (add) { // If we have just added groups, then it's possible that // the "user" groupis at the front of the list, in which // case we should assign another specific group. e = iQuery.get(Experimenter.class, e.getId()); log.debug("sizeOfGroupExperimenterMap=" + e.sizeOfGroupExperimenterMap()); if (e.sizeOfGroupExperimenterMap() > 1) { GroupExperimenterMap primary = e.getGroupExperimenterMap(0); GroupExperimenterMap next = e.getGroupExperimenterMap(1); log.debug("primary=" + primary.parent().getId()); log.debug("next=" + next.parent().getId()); if (primary.parent().getId().equals(roles.getUserGroupId())) { log.debug("calling setDefaultGroup"); provider.setDefaultGroup(e, next.parent()); } } } } }
From source file:ome.util.ReflectionUtils.java
public static void findFieldsOfClass(Class target, Object o, String path, Log log, Set done) { if (null == path || path.equals("")) { path = "\nthis"; }// www. j a v a 2 s . co m if (null == done) { done = new HashSet(); } if (done.contains(o)) { return; } done.add(o); if (target.isInstance(o)) { log.info(path + ";" + "\n----------------------\n" + o.toString() + " < " + o.getClass()); } else if (o instanceof Set) { for (Iterator it = ((Set) o).iterator(); it.hasNext();) { Object element = it.next(); findFieldsOfClass(target, element, path, log, done); } } else { Method[] accessors = getGettersAndSetters(o); log.debug(accessors); for (int i = 0; i < accessors.length; i++) { Method method = accessors[i]; if (method.getName().startsWith("get")) { log.debug("Trying " + method); Object obj = invokeGetter(o, method); if (null != obj) { findFieldsOfClass(target, obj, path + ".\n" + method.getName() + "()", log, done); } } } } }
From source file:org.acmsl.queryj.api.AbstractTemplate.java
/** * Logs a custom header./* w w w.ja va 2 s. co m*/ * @param header the header. */ protected void logHeader(@Nullable final String header) { @Nullable final Log t_Log = UniqueLogFactory.getLog(AbstractQueryJTemplate.class); if (t_Log != null) { t_Log.info(header); } }