Example usage for java.util.logging Logger fine

List of usage examples for java.util.logging Logger fine

Introduction

On this page you can find example usage for java.util.logging Logger.fine.

Prototype

public void fine(Supplier<String> msgSupplier) 

Document

Log a FINE message, which is only to be constructed if the logging level is such that the message will actually be logged.
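
Because the Supplier overload builds its message lazily, it avoids the cost of string construction when FINE is disabled. The sketch below, assuming an illustrative logger name and a hypothetical buildExpensiveDump() helper (handler configuration omitted), contrasts it with the plain String overload used in most of the examples that follow.

import java.util.logging.Level;
import java.util.logging.Logger;

public class FineLoggingExample {

    // Illustrative logger name; any name works.
    private static final Logger LOG = Logger.getLogger("com.example.demo");

    public static void main(String[] args) {
        // Lower the logger's threshold so FINE records pass the level check
        // (the default console handler still filters at INFO unless reconfigured).
        LOG.setLevel(Level.FINE);

        // String overload, as used in most of the examples below:
        // the message is always constructed, even when FINE is not loggable.
        LOG.fine("Processing started");

        // Supplier overload (the prototype above): the lambda runs only if
        // FINE is actually loggable, so the costly concatenation is skipped otherwise.
        LOG.fine(() -> "State dump: " + buildExpensiveDump());
    }

    // Hypothetical stand-in for an expensive computation.
    private static String buildExpensiveDump() {
        return "...";
    }
}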

Usage

From source file:at.tuwien.ifs.somtoolbox.visualization.PMatrix.java

private int calculateParetoRadiusPercentile(InputVectorDistanceMatrix distances, DoubleMatrix1D percentiles) {

    // the paper describes the 18th percentile as a good starting value for Gaussian distributions
    int percentile = 18;
    double radius;

    // variables needed for the search
    int last_percentile = percentile;
    double diff = 0.0;
    double last_diff = 1.0;
    double median_size;
    boolean stop = false;
    double upper_size = 1.0;
    double lower_size = 0.0;

    // upper and lower search boundaries for the percentiles
    double upper_percentile = 50;
    double lower_percentile = 2;

    Logger log = Logger.getLogger("at.tuwien.ifs.somtoolbox");

    while (!stop) {
        // get current radius from the percentile
        radius = percentiles.getQuick(percentile);

        // compute densities with this radius
        DoubleMatrix1D densities = getAllDensities(distances, radius);

        // median percentage of points in spheres
        if (densities.size() != 0) {
            double median = VectorTools.median(densities.toArray());
            double mean = densities.zSum() / densities.size();
            log.info("Mean: " + mean + " median: " + median);
            median_size = Math.max(median, mean) / distances.columns();
        } else {
            median_size = 0;
        }
        log.fine("spheres for " + percentile + "%-tile contain on average " + Math.round(median_size * 100)
                + "% of the data");

        // compute difference of median size to the defined optimum
        diff = median_size - PARETO_SIZE;

        // stop if last step was 1, or the defined upper/lower stopping criterion is reached
        stop = Math.abs(percentile - last_percentile) == 1 || percentile == upper_percentile
                || percentile == lower_percentile;

        if (!stop) { // iterate
            last_percentile = percentile;
            last_diff = diff;

            // adjust percentile towards optimum with linear interpolation
            if (diff > 0) {
                upper_percentile = percentile;
                upper_size = median_size;
            } else {
                lower_percentile = percentile;
                lower_size = median_size;
            }

            // compute the estimated position of pareto size in the current search interval
            double pest = (PARETO_SIZE - lower_size) / (upper_size - lower_size)
                    * (upper_percentile - lower_percentile) + lower_percentile;

            // step towards the estimated position
            double step = pest - percentile;

            // always go at least 1 resp. -1
            if (step > 0) {
                step = Math.max(step, 1);
            } else {
                step = Math.min(step, -1);
            }
            percentile = percentile + (int) Math.round(step);
        } else {
            // if it is better, revert to the last percentile before we stopped
            if (Math.abs(diff) > Math.abs(last_diff)) {
                percentile = last_percentile;
            }
        }
    }

    log.info("P-Matrix: " + percentile + "%tile chosen.");
    return percentile;
}

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * @param folds
 * @param minAgents
 * @param maxAgents
 * @param originalDatasetPath
 * @param outputDir
 * @param scenario
 * @param logger
 */
public void splitDataset(int folds, int minAgents, int maxAgents, String originalDatasetPath, String outputDir,
        String scenario, Logger logger) {

    int ratioint = (int) ((1 / (double) folds) * 100);
    double roundedratio = ((double) ratioint) / 100;

    // Look for essentials
    List<String[]> essentials = this.getEssentials(originalDatasetPath, logger);

    for (int fold = 0; fold < folds; fold++) {
        String outputDirWithRatio = outputDir + "/" + roundedratio + "testRatio/iteration-" + fold;
        File dir = new File(outputDirWithRatio);
        if (!dir.exists() || !dir.isDirectory()) {
            dir.mkdirs();
        }

        logger.finer("--> splitDataset()");
        logger.fine("Creating experiment.info...");

        try {

            Instances originalData = this.getDataFromCSV(originalDatasetPath);

            originalData.randomize(new Random());
            originalData.stratify(folds);

            // TestDataSet
            Instances testData = originalData.testCV(folds, fold);
            CSVSaver saver = new CSVSaver();
            ArffSaver arffsaver = new ArffSaver();
            File file = new File(outputDirWithRatio + File.separator + "test-dataset.csv");
            if (!file.exists()) {
                saver.resetOptions();
                saver.setInstances(testData);
                saver.setFile(file);
                saver.writeBatch();
            }

            file = new File(outputDirWithRatio + File.separator + "test-dataset.arff");
            if (!file.exists()) {
                arffsaver.resetOptions();
                arffsaver.setInstances(testData);
                arffsaver.setFile(file);
                arffsaver.writeBatch();
            }

            // BayesCentralDataset
            Instances trainData = originalData.trainCV(folds, fold);
            file = new File(outputDirWithRatio + File.separator + "bayes-central-dataset.csv");
            if (!file.exists()) {
                saver.resetOptions();
                saver.setInstances(trainData);
                saver.setFile(file);
                saver.writeBatch();
                this.copyFileUsingApacheCommonsIO(file,
                        new File(
                                outputDirWithRatio + File.separator + "bayes-central-dataset-noEssentials.csv"),
                        logger);
                CsvWriter w = new CsvWriter(new FileWriter(file, true), ',');
                for (String[] essential : essentials) {
                    w.writeRecord(essential);
                }
                w.close();
            }
            file = new File(outputDirWithRatio + File.separator + "bayes-central-dataset.arff");
            if (!file.exists()) {
                arffsaver.resetOptions();
                arffsaver.setInstances(trainData);
                arffsaver.setFile(file);
                arffsaver.writeBatch();
                this.copyFileUsingApacheCommonsIO(file, new File(
                        outputDirWithRatio + File.separator + "bayes-central-dataset-noEssentials.arff"),
                        logger);
                CsvWriter w = new CsvWriter(new FileWriter(file, true), ',');
                for (String[] essential : essentials) {
                    w.writeRecord(essential);
                }
                w.close();
            }

            // Agent datasets
            CsvReader csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));
            csvreader.readHeaders();
            String[] headers = csvreader.getHeaders();
            csvreader.close();

            for (int agents = minAgents; agents <= maxAgents; agents++) {
                this.createExperimentInfoFile(folds, agents, originalDatasetPath, outputDirWithRatio, scenario,
                        logger);
                HashMap<String, CsvWriter> writers = new HashMap<String, CsvWriter>();
                String agentsDatasetsDir = outputDirWithRatio + File.separator + agents + "agents";
                HashMap<String, CsvWriter> arffWriters = new HashMap<String, CsvWriter>();
                File f = new File(agentsDatasetsDir);
                if (!f.isDirectory()) {
                    f.mkdirs();
                }
                Instances copy = new Instances(trainData);
                copy.delete();
                for (int i = 0; i < agents; i++) {
                    String fileName = agentsDatasetsDir + File.separator + "agent-" + i + "-dataset.csv";
                    file = new File(fileName);
                    if (!file.exists()) {
                        CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
                        writer.writeRecord(headers);
                        writers.put("AGENT" + i, writer);
                    }
                    fileName = agentsDatasetsDir + File.separator + "agent-" + i + "-dataset.arff";
                    file = new File(fileName);
                    if (!file.exists()) {
                        arffsaver.resetOptions();
                        arffsaver.setInstances(copy);
                        arffsaver.setFile(new File(fileName));
                        arffsaver.writeBatch();
                        CsvWriter arffwriter = new CsvWriter(new FileWriter(fileName, true), ',');
                        arffWriters.put("AGENT" + i, arffwriter);
                    }

                    logger.fine("AGENT" + i + " dataset created in csv and arff formats.");
                }
                // Append essentials to all
                for (String[] essential : essentials) {
                    for (CsvWriter wr : writers.values()) {
                        wr.writeRecord(essential);
                    }
                    for (CsvWriter arffwr : arffWriters.values()) {
                        arffwr.writeRecord(essential);
                    }
                }

                int agentCounter = 0;
                for (int j = 0; j < trainData.numInstances(); j++) {
                    Instance instance = trainData.instance(j);
                    CsvWriter writer = writers.get("AGENT" + agentCounter);
                    CsvWriter arffwriter = arffWriters.get("AGENT" + agentCounter);
                    String[] row = new String[instance.numAttributes()];
                    for (int a = 0; a < instance.numAttributes(); a++) {
                        row[a] = instance.stringValue(a);
                    }
                    if (writer != null) {
                        writer.writeRecord(row);
                    }
                    if (arffwriter != null) {
                        arffwriter.writeRecord(row);
                    }
                    agentCounter++;
                    if (agentCounter == agents) {
                        agentCounter = 0;
                    }
                }

                for (CsvWriter wr : writers.values()) {
                    wr.close();
                }
                for (CsvWriter arffwr : arffWriters.values()) {
                    arffwr.close();
                }
            }

        } catch (Exception e) {
            logger.severe("Exception while splitting dataset. ->");
            logger.severe(e.getMessage());
            System.exit(1);
        }

        logger.finest("Dataset for fold " + fold + " created.");
    }

    logger.finer("<-- splitDataset()");

}

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * @param originalDatasetPath
 * @param scenario
 * @return
 */
private List<String[]> getEssentials(String originalDatasetPath, Logger logger) {
    // Find essentials
    List<String[]> essentials = new ArrayList<String[]>();
    HashMap<String, List<String>> nodesAndStates = new HashMap<String, List<String>>();
    try {
        // Look for all possible states
        Reader fr = new FileReader(originalDatasetPath);
        CsvReader reader = new CsvReader(fr);
        reader.readHeaders();
        String[] headers = reader.getHeaders();
        for (String header : headers) {
            nodesAndStates.put(header, new ArrayList<String>());
        }
        String[] values;
        while (reader.readRecord()) {
            values = reader.getValues();
            for (int i = 0; i < values.length; i++) {
                if (!nodesAndStates.get(headers[i]).contains(values[i])) {
                    nodesAndStates.get(headers[i]).add(values[i]);
                    if (!essentials.contains(values)) {
                        essentials.add(values);
                    }
                }
            }
        }

        reader.close();

        logger.fine("Number of Essentials: " + essentials.size());
    } catch (Exception e) {
        logger.severe(e.getMessage());
        System.exit(1);
    }
    return essentials;
}

From source file:jp.ikedam.jenkins.plugins.ldap_sasl.LdapSaslSecurityRealm.java

/**
 * Authorize a user.
 * 
 * @param username
 * @param password
 * @see hudson.security.AbstractPasswordBasedSecurityRealm#authenticate(java.lang.String, java.lang.String)
 */
@Override
protected UserDetails authenticate(String username, String password) throws AuthenticationException {
    Logger logger = getLogger();

    // check configuration.
    String ldapUris = getValidLdapUris();
    if (StringUtils.isBlank(ldapUris)) {
        logger.severe("No valid LDAP URI is specified.");
        throw new AuthenticationServiceException("No valid LDAP URI is specified.");
    }

    String mechanisms = getMechanisms();
    if (StringUtils.isBlank(mechanisms)) {
        logger.severe("No valid mechanism is specified.");
        throw new AuthenticationServiceException("No valid mechanism is specified.");
    }

    // TODO: Test with LDAPS.

    // Parameters for JNDI
    Hashtable<String, Object> env = new Hashtable<String, Object>();
    env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
    env.put(Context.PROVIDER_URL, ldapUris);
    env.put(Context.SECURITY_PRINCIPAL, username);
    env.put(Context.SECURITY_CREDENTIALS, password);
    env.put(Context.SECURITY_AUTHENTICATION, mechanisms);
    env.put("com.sun.jndi.ldap.connect.timeout", Integer.toString(getConnectionTimeout()));
    env.put("com.sun.jndi.ldap.read.timeout", Integer.toString(getReadTimeout()));

    logger.fine("Authenticating with LDAP-SASL:");
    logger.fine(String.format("username=%s", username));
    logger.fine(String.format("servers=%s", ldapUris));
    logger.fine(String.format("mech=%s", mechanisms));

    LdapContext ctx = null;
    try {
        ctx = new InitialLdapContext(env, null);
    } catch (javax.naming.AuthenticationException e) {
        // Authentication Failure...
        throw new BadCredentialsException(String.format("Authentication failed: %s", username), e);
    } catch (NamingException e) {
        // Unexpected failure...
        throw new AuthenticationServiceException(String.format("Authentication failed: %s", username), e);
    }

    String userDn = (getUserDnResolver() != null) ? getUserDnResolver().getUserDn(ctx, username) : null;
    logger.fine(String.format("User DN is %s", userDn));

    List<GrantedAuthority> authorities = (getGroupResolver() != null)
            ? getGroupResolver().resolveGroup(ctx, userDn, username)
            : new ArrayList<GrantedAuthority>();

    logger.fine("Authenticating succeeded.");
    return new LdapUser(username, "", // password(not used)
            userDn, // dn of this user.
            true, // enabled
            true, // accountNonExpired
            true, // credentialsNonExpired
            true, // accountNonLocked
            authorities.toArray(new GrantedAuthority[0]));
}

From source file:com.zacwolf.commons.crypto._CRYPTOfactory.java

public final _CRYPTOfactory test(Logger logger, String[] testargs) {
    try {
        final File testfile = createTempFile("_CRYPTOfactory" + this.getType() + "_Test", ".tmp");

        logger.finer(this.getClass().getName() + ":TEST:" + this.getType() + ":.crypt(\"" + testargs[0]
                + "\"):RESULT:"
                + new String(this.decrypt(this.encrypt(testargs[0].getBytes(_CRYPTOfactory.ENCODING))),
                        _CRYPTOfactory.ENCODING));

        this.encryptToOutputStream(testargs[0].getBytes(_CRYPTOfactory.ENCODING),
                new FileOutputStream(testfile));
        logger.finer(this.getClass().getName() + ":TEST:" + this.getType() + ":.cryptToOutputStream(\""
                + testargs[0] + "\"):RESULT:" + new String(
                        this.decryptFromInputStream(new FileInputStream(testfile)), _CRYPTOfactory.ENCODING));
        testfile.delete();

        if (!this.getType().equalsIgnoreCase("RSA")) {
            this.encryptObjToOutputStream(new TestSerObj(testargs), new FileOutputStream(testfile));
            logger.finer("_CRYPTOfactory:TEST:" + this.getType() + ":.cryptObjToOutputStream:RESULT:"
                    + this.decryptObjFromInputStream(new FileInputStream(testfile)));
            logger.finer(this.getClass().getName() + ":TEST:Fully initialized " + this.crypter.getType()
                    + " cipher\n");
            testfile.delete();
        }
    } catch (Exception e) {
        if (e instanceof InvalidKeyException && this.crypter instanceof Crypter_RSA) {
            logger.fine("Unable to test an RSACypter with only a public key");
        } else {
            logger.severe(this.getClass().getName() + ":TEST:" + this.crypter.getType() + ":ERROR:" + e + "\n");
            e.printStackTrace();
        }
    } finally {
        logger.exiting(this.getClass().getName(), "test()", JVM.getMemoryStats());
    }
    return this;//So that it can be chained with the constructor call
}

From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java

@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public Long processRecord(DataverseRequest dataverseRequest, Logger hdLogger, PrintWriter importCleanupLog,
        OaiHandler oaiHandler, String identifier, MutableBoolean recordErrorOccurred,
        MutableLong processedSizeThisBatch, List<String> deletedIdentifiers) {
    String errMessage = null;
    Dataset harvestedDataset = null;
    logGetRecord(hdLogger, oaiHandler, identifier);
    File tempFile = null;

    try {
        FastGetRecord record = oaiHandler.runGetRecord(identifier);
        errMessage = record.getErrorMessage();

        if (errMessage != null) {
            hdLogger.log(Level.SEVERE, "Error calling GetRecord - " + errMessage);
        } else if (record.isDeleted()) {
            hdLogger.info(
                    "Deleting harvesting dataset for " + identifier + ", per the OAI server's instructions.");

            Dataset dataset = datasetService
                    .getDatasetByHarvestInfo(oaiHandler.getHarvestingClient().getDataverse(), identifier);
            if (dataset != null) {
                hdLogger.info("Deleting dataset " + dataset.getGlobalId());
                deleteHarvestedDataset(dataset, dataverseRequest, hdLogger);
                // TODO: 
                // check the status of that Delete - see if it actually succeeded
                deletedIdentifiers.add(identifier);
            } else {
                hdLogger.info("No dataset found for " + identifier + ", skipping delete. ");
            }

        } else {
            hdLogger.info("Successfully retrieved GetRecord response.");

            tempFile = record.getMetadataFile();
            PrintWriter cleanupLog;
            harvestedDataset = importService.doImportHarvestedDataset(dataverseRequest,
                    oaiHandler.getHarvestingClient(), identifier, oaiHandler.getMetadataPrefix(),
                    record.getMetadataFile(), importCleanupLog);

            hdLogger.fine("Harvest Successful for identifier " + identifier);
            hdLogger.fine("Size of this record: " + record.getMetadataFile().length());
            processedSizeThisBatch.add(record.getMetadataFile().length());
        }
    } catch (Throwable e) {
        logGetRecordException(hdLogger, oaiHandler, identifier, e);
        errMessage = "Caught exception while executing GetRecord on " + identifier;
        //logException(e, hdLogger);

    } finally {
        if (tempFile != null) {
            // temporary - let's not delete the temp metadata file if anything went wrong, for now:
            if (errMessage == null) {
                try {
                    tempFile.delete();
                } catch (Throwable t) {
                }
                ;
            }
        }
    }

    // TODO: the message below is taken from DVN3; - figure out what it means...
    // 
    // If we got an Error from the OAI server or an exception happened during import, then
    // set recordErrorOccurred to true (if recordErrorOccurred is being used)
    // otherwise throw an exception (if recordErrorOccurred is not used, i.e null)

    if (errMessage != null) {
        if (recordErrorOccurred != null) {
            recordErrorOccurred.setValue(true);
        } else {
            throw new EJBException(errMessage);
        }
    }

    return harvestedDataset != null ? harvestedDataset.getId() : null;
}

From source file:org.apache.sling.commons.log.logback.integration.ITJULIntegration.java

/**
 * Checks the default settings. It runs the bundle with minimum dependencies
 */
@Test
public void testJULLogging() throws Exception {
    java.util.logging.Logger julLogger = java.util.logging.Logger.getLogger("foo.jul.1");
    org.slf4j.Logger slf4jLogger = LoggerFactory.getLogger("foo.jul.1");

    assertEquals(java.util.logging.Level.FINEST, julLogger.getLevel());
    assertTrue(slf4jLogger.isTraceEnabled());

    // Now add an appender and see if JUL logs are handled
    TestAppender ta = new TestAppender();
    Dictionary<String, Object> props = new Hashtable<String, Object>();

    String[] loggers = { "foo.jul.1", };
    ch.qos.logback.classic.Logger bar = (ch.qos.logback.classic.Logger) LoggerFactory.getLogger(loggers[0]);
    bar.setLevel(Level.INFO);

    props.put("loggers", loggers);
    ServiceRegistration sr = bundleContext.registerService(Appender.class.getName(), ta, props);

    delay();

    // Level should be INFO now
    assertEquals(java.util.logging.Level.INFO, julLogger.getLevel());

    julLogger.info("Info message");
    julLogger.fine("Fine message");

    assertEquals(1, ta.events.size());

}

From source file:org.apache.sling.extensions.logback.integration.ITJULIntegration.java

/**
 * Checks the default settings. It runs the bundle with minimum dependencies
 */
@Test
public void testJULLogging() throws Exception {
    java.util.logging.Logger julLogger = java.util.logging.Logger.getLogger("foo.jul.1");
    org.slf4j.Logger slf4jLogger = LoggerFactory.getLogger("foo.jul.1");

    assertEquals(java.util.logging.Level.FINEST, julLogger.getLevel());
    assertTrue(slf4jLogger.isTraceEnabled());

    //Now add an appender and see if JUL logs are handled
    TestAppender ta = new TestAppender();
    Dictionary<String, Object> props = new Hashtable<String, Object>();

    String[] loggers = { "foo.jul.1:INFO", };

    props.put("loggers", loggers);
    ServiceRegistration sr = bundleContext.registerService(Appender.class.getName(), ta, props);

    delay();

    //Level should be INFO now
    assertEquals(java.util.logging.Level.INFO, julLogger.getLevel());

    julLogger.info("Info message");
    julLogger.fine("Fine message");

    assertEquals(1, ta.events.size());

}

From source file:org.jdesktop.wonderland.modules.service.InstallManager.java

/**
 * Adds a new module to the installed/ directory. This simply copies files; it
 * assumes all preparations or checks have already been performed. It is given
 * the module name and the File root of where to copy from, and returns the
 * Module object representing the installed module.
 */
public Module add(String moduleName, File root) {
    /* The error logger */
    Logger logger = ModuleManager.getLogger();

    /*
     * Expand the contents of the module to the installed/ directory. First
     * create a directory holding the module (but check first if it already
     * exists and log a warning message).
     */
    File file = new File(this.installedFile, moduleName);
    if (ModuleManagerUtils.makeCleanDirectory(file) == false) {
        logger.log(Level.WARNING, "[MODULES] INSTALL Failed to Create " + file.getAbsolutePath());
        return null;
    }

    /* Next, expand the contents of the module into this directory */
    try {
        FileUtils.copyDirectory(root, file);
    } catch (java.io.IOException excp) {
        logger.log(Level.WARNING,
                "[MODULES] INSTALL Failed to Copy " + root.getAbsolutePath() + " To " + file.getAbsolutePath(),
                excp);
        return null;
    }

    /* Re-open module in the installed directory, add to the list */
    Module module = null;
    try {
        module = ModuleFactory.open(file);
        this.installedModules.put(moduleName, module);

        if (logger.isLoggable(Level.FINE)) {
            logger.fine("Add installed module " + module);
        }
    } catch (java.lang.Exception excp) {
        /* Log the error and return false */
        logger.log(Level.WARNING, "[MODULES] PENDING Failed to Open Module", excp);
        return null;
    }
    return module;
}

From source file:org.jdesktop.wonderland.modules.service.InstallManager.java

/**
 * Returns a map of module names and objects from a given directory. If no
 * modules are present, this method returns an empty map.
 *
 * @return A map of unique module names and their Module objects
 */
private Map<String, Module> fetchModules() {
    Logger logger = ModuleManager.getLogger();
    Map<String, Module> map = new HashMap<String, Module>();

    /*
     * Loop through each file and check that it is potentially valid.
     * If so, add its name to the map of module names
     */
    File[] files = this.installedFile.listFiles();
    for (File file : files) {
        /* Attempt to create the module */
        try {
            Module module = ModuleFactory.open(file);
            map.put(module.getName(), module);

            if (logger.isLoggable(Level.FINE)) {
                logger.fine("Load installed module " + module);
            }
        } catch (java.lang.Exception excp) {
            ModuleManager.getLogger().log(Level.WARNING, "[MODULES] Invalid module " + file, excp);
        }
    }
    return map;
}