Example usage for java.util.logging Logger finer

Introduction

On this page you can find usage examples for java.util.logging Logger.finer.

Prototype

public void finer(Supplier<String> msgSupplier) 

Document

Log a FINER message, which is only to be constructed if the logging level is such that the message will actually be logged.
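
Because the message is wrapped in a Supplier, the string is constructed only when FINER is actually enabled. A minimal sketch of the lazy form (class name and message are illustrative):

import java.util.Arrays;
import java.util.logging.Logger;

public class LazyFinerDemo {
    public static void main(String[] args) {
        Logger logger = Logger.getLogger(LazyFinerDemo.class.getName());
        int[] bigState = new int[1000];
        // The lambda is evaluated only if FINER is enabled, so the
        // Arrays.toString call is skipped otherwise.
        logger.finer(() -> "state dump: " + Arrays.toString(bigState));
    }
}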

Usage

From source file:Logging.java

public static void main(String[] args) {
    Logger log = Logger.getLogger("global");

    log.finest("A");
    log.finer("B");
    log.fine("C");
    log.config("D");
    log.info("E");
    log.warning("O");
    log.severe("A");
}
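
With the default JDK configuration (root console handler at INFO), only the info, warning, and severe messages above are printed; the finer call is filtered out. A sketch of one way to surface FINER output on the console, assuming no custom logging.properties is in place:

import java.util.logging.ConsoleHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class EnableFiner {
    public static void main(String[] args) {
        Logger log = Logger.getLogger("global");
        log.setLevel(Level.FINER);          // accept FINER on the logger...
        ConsoleHandler handler = new ConsoleHandler();
        handler.setLevel(Level.FINER);      // ...and on the handler that prints it
        log.addHandler(handler);
        log.setUseParentHandlers(false);    // avoid duplicate output via the root handler
        log.finer("now visible");
    }
}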

From source file:MainClass.java

public static void main(String[] args) {
    Logger logger = Logger.getLogger("com.java2s.log");

    logger.severe("severe");
    logger.warning("warning");
    logger.info("info");
    logger.config("config");
    logger.fine("fine");
    logger.finer("finer");
    logger.finest("value =" + 42);
}

From source file:Main.java

public static void main(String[] argv) throws Exception {
    Logger logger = Logger.getLogger("com.mycompany");
    FileHandler fh = new FileHandler("mylog.txt");
    fh.setFormatter(new SimpleFormatter());
    logger.addHandler(fh);

    // fh = new FileHandler("mylog.xml");
    // fh.setFormatter(new XMLFormatter());
    // logger.addHandler(fh);

    // Log a few messages
    logger.severe("my severe message");
    logger.warning("my warning message");
    logger.info("my info message");
    logger.config("my config message");
    logger.fine("my fine message");
    logger.finer("my finer message");
    logger.finest("my finest message");
}

From source file:com.zacwolf.commons.crypto._CRYPTOfactory.java

public final _CRYPTOfactory test(Logger logger, String[] testargs) {
    try {
        final File testfile = createTempFile("_CRYPTOfactory" + this.getType() + "_Test", ".tmp");

        logger.finer(this.getClass().getName() + ":TEST:" + this.getType() + ":.crypt(\"" + testargs[0]
                + "\"):RESULT:"
                + new String(this.decrypt(this.encrypt(testargs[0].getBytes(_CRYPTOfactory.ENCODING))),
                        _CRYPTOfactory.ENCODING));

        this.encryptToOutputStream(testargs[0].getBytes(_CRYPTOfactory.ENCODING),
                new FileOutputStream(testfile));
        logger.finer(this.getClass().getName() + ":TEST:" + this.getType() + ":.cryptToOutputStream(\""
                + testargs[0] + "\"):RESULT:" + new String(
                        this.decryptFromInputStream(new FileInputStream(testfile)), _CRYPTOfactory.ENCODING));
        testfile.delete();

        if (!this.getType().equalsIgnoreCase("RSA")) {
            this.encryptObjToOutputStream(new TestSerObj(testargs), new FileOutputStream(testfile));
            logger.finer("_CRYPTOfactory:TEST:" + this.getType() + ":.cryptObjToOutputStream:RESULT:"
                    + this.decryptObjFromInputStream(new FileInputStream(testfile)));
            logger.finer(this.getClass().getName() + ":TEST:Fully initialized " + this.crypter.getType()
                    + " cipher\n");
            testfile.delete();
        }
    } catch (Exception e) {
        if (e instanceof InvalidKeyException && this.crypter instanceof Crypter_RSA) {
            logger.fine("Unable to test an RSACypter with only a public key");
        } else {
            logger.severe(this.getClass().getName() + ":TEST:" + this.crypter.getType() + ":ERROR:" + e + "\n");
            e.printStackTrace();
        }
    } finally {
        logger.exiting(this.getClass().getName(), "test()", JVM.getMemoryStats());
    }
    return this; // so that it can be chained with the constructor call
}
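
The finer calls above concatenate their messages eagerly, so the strings are built even when FINER is disabled. Where the message has no side effects the test depends on, the Supplier overload defers that cost; a sketch with stand-in variables:

import java.util.logging.Logger;

class LazyFinerSketch {
    static void log(Logger logger, String type, Object result) {
        // Eager: the string is built whether or not FINER is enabled.
        logger.finer("_CRYPTOfactory:TEST:" + type + ":RESULT:" + result);
        // Lazy: the lambda runs only when FINER is enabled.
        logger.finer(() -> "_CRYPTOfactory:TEST:" + type + ":RESULT:" + result);
    }
}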

From source file:birch.filter.EncryptionFilter.java

public EncryptionFilter(Filter filter, Config config) {
    super(filter);

    this.targets = new HashMap<String, Encryptor>();
    this.keyToTarget = new HashMap<String, String>();
    linebreakFilter = new LinebreakFilter(Filter.nullObject);

    keyParsers = new ArrayList<>();
    keyParsers.add(new OTPKeyParser());
    keyParsers.add(new ExtendedKeyParser());
    keyParsers.add(new DefaultKeyParser());

    encryptedPrefix = config.getProperties().get("prefixencrypted") == null ? ""
            : config.getProperties().get("prefixencrypted");
    plainPrefix = config.getProperties().get("prefixplain") == null ? ""
            : config.getProperties().get("prefixplain");
    ignorePrefix = config.getProperties().get("ignoreprefix") == null ? ""
            : config.getProperties().get("ignoreprefix");

    if (ignorePrefix.matches(".*[^\\\\]*(\\\\\\\\)*$")) {
        activeEncryptedPattern = Pattern
                .compile(MessageFormat.format("(?:{0})?{1}", ignorePrefix, encryptedPattern.pattern()));
    } else {
        activeEncryptedPattern = encryptedPattern;
    }

    Logger logger = Logger.getLogger(EncryptionFilter.class.getName());
    logger.finer("encryptedPrefix: " + encryptedPrefix);
    logger.finer("plainPrefix: " + plainPrefix);
    logger.finer("ignoreprefix: " + ignorePrefix);
    logger.finer("activeEncryptedPattern: " + activeEncryptedPattern.pattern());

    String key;
    Encryptor encryptor;
    Map<String, String> configTargets;
    configTargets = config.getTargets();

    for (String target : configTargets.keySet()) {
        try {

            key = configTargets.get(target);
            if (key != null) {
                encryptor = getEncryptor(key);
                if (encryptor != null) {
                    this.targets.put(target, encryptor);
                    this.keyToTarget.put(key, target);
                }
            }

        } catch (GeneralSecurityException ex) {
            Logger.getLogger(EncryptionFilter.class.getName()).log(Level.WARNING, ex.getLocalizedMessage());
        } catch (IOException ex) {
            Logger.getLogger(EncryptionFilter.class.getName()).log(Level.WARNING, ex.getLocalizedMessage());
        }
    }
}
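
A run of finer calls like the four above can also share a single isLoggable guard, a common alternative to the Supplier overload when several messages are emitted together; a sketch with stand-in parameters:

import java.util.logging.Level;
import java.util.logging.Logger;

class GuardedFinerSketch {
    static void logPrefixes(Logger logger, String encryptedPrefix, String plainPrefix) {
        // One check skips all of the concatenation when FINER is disabled.
        if (logger.isLoggable(Level.FINER)) {
            logger.finer("encryptedPrefix: " + encryptedPrefix);
            logger.finer("plainPrefix: " + plainPrefix);
        }
    }
}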

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * This method splits the original dataset into many small datasets for a
 * given number of agents.
 * 
 * @param ratio
 *            a value in (0, 1); typically 0.3 or 0.4, the fraction of the
 *            original data used to build the test dataset
 * @param agents
 *            number of agents to split the original dataset among
 * @param originalDatasetPath
 * @param outputDir
 * @param central
 *            true to create a bayescentral dataset that joins all agent
 *            data
 * @param scenario
 * @param logger
 * @param iteration
 */
public void splitDataset(double ratio, int agents, String originalDatasetPath, String outputDir,
        boolean central, String scenario, Logger logger, int iteration) {

    int ratioint = (int) (ratio * 100);
    double roundedratio = ((double) ratioint) / 100;
    String outputDirWithRatio = outputDir + "/" + roundedratio + "testRatio/iteration-" + iteration;
    File dir = new File(outputDirWithRatio);
    if (!dir.exists() || !dir.isDirectory()) {
        dir.mkdirs();
    }

    logger.finer("--> splitDataset()");
    logger.fine("Creating experiment.info...");
    this.createExperimentInfoFile(ratio, agents, originalDatasetPath, outputDirWithRatio, central, scenario,
            logger);

    try {
        // Look for essentials
        List<String[]> essentials = this.getEssentials(originalDatasetPath, logger);

        HashMap<String, CsvWriter> writers = new HashMap<String, CsvWriter>();
        CsvReader csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));

        csvreader.readHeaders();
        String[] headers = csvreader.getHeaders();
        int originalDatasetRowsCounter = 0;
        while (csvreader.readRecord()) {
            originalDatasetRowsCounter++;
        }
        csvreader.close();

        // Create datasets files

        // Central dataset
        if (central) {
            String fileName = outputDirWithRatio + File.separator + "bayes-central-dataset.csv";
            CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
            writer.writeRecord(headers);
            writers.put("CENTRAL", writer);
            for (String[] essential : essentials) {
                writer.writeRecord(essential);
            }
            logger.fine("Bayes central dataset created.");
        }

        // Agent datasets
        String agentsDatasetsDir = outputDirWithRatio + File.separator + agents + "agents";
        File f = new File(agentsDatasetsDir);
        if (!f.isDirectory()) {
            f.mkdirs();
        }
        for (int i = 0; i < agents; i++) {
            String fileName = agentsDatasetsDir + File.separator + "agent-" + i + "-dataset.csv";
            CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
            writer.writeRecord(headers);
            for (String[] essential : essentials) {
                writer.writeRecord(essential);
            }
            writers.put("AGENT" + i, writer);
            logger.fine("AGENT" + i + " dataset created.");
        }

        // Test dataset
        String fileName = outputDirWithRatio + File.separator + "test-dataset.csv";
        CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
        writer.writeRecord(headers);
        writers.put("TEST", writer);
        logger.fine("Test dataset created.");

        // Create an ordering queue
        int testCases = (int) (ratio * originalDatasetRowsCounter);
        int testStep = originalDatasetRowsCounter / testCases;

        csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));

        csvreader.readHeaders();
        int stepCounter = 0 - (iteration % testStep);
        int agentCounter = 0;
        while (csvreader.readRecord()) {
            String[] row = csvreader.getValues();
            if (stepCounter % testStep == 0) {
                writer = writers.get("TEST");
                writer.writeRecord(row);
            } else {
                writer = writers.get("AGENT" + agentCounter);
                writer.writeRecord(row);
                writer = writers.get("CENTRAL");
                writer.writeRecord(row);
                agentCounter++;
                if (agentCounter == agents) {
                    agentCounter = 0;
                }
            }
            stepCounter++;
        }

        csvreader.close();
        for (CsvWriter w : writers.values()) {
            w.close();
        }

    } catch (Exception e) {
        logger.severe("Exception while splitting dataset. ->");
        logger.severe(e.getMessage());
        System.exit(1);
    }

    logger.finer("<-- splitDataset()");
}
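
The --> and <-- markers above are hand-rolled entry/exit tracing. java.util.logging provides entering and exiting for exactly this purpose; they emit ENTRY and RETURN records at FINER. A minimal sketch:

import java.util.logging.Logger;

class TraceSketch {
    void splitDataset(Logger logger) {
        logger.entering(TraceSketch.class.getName(), "splitDataset");
        // ... method body ...
        logger.exiting(TraceSketch.class.getName(), "splitDataset");
    }
}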

From source file:com.google.enterprise.connector.sharepoint.client.SharepointClientContext.java

/**
 * Logs the excluded URL to the excluded_url log file.
 *
 * @param info the message to log
 * @param loggers additional loggers to log the message to
 */
public void logExcludedURL(final String info, Logger... loggers) {
    logToFile(SPConstants.EXCLUDED_URL_LOG, info);
    for (Logger logger : loggers) {
        logger.finer(info);
    }
}
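
A hypothetical call site (the context variable, logger names, and URL are illustrative): the varargs parameter fans the same message out to any number of loggers at FINER, in addition to the excluded_url log file.

// Hypothetical usage; "context" is a SharepointClientContext instance.
context.logExcludedURL("Excluded by pattern: http://example.com/private",
        Logger.getLogger("crawl"), Logger.getLogger("audit"));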

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * @param folds
 * @param minAgents
 * @param maxAgents
 * @param originalDatasetPath
 * @param outputDir
 * @param scenario
 * @param logger
 */
public void splitDataset(int folds, int minAgents, int maxAgents, String originalDatasetPath, String outputDir,
        String scenario, Logger logger) {

    int ratioint = (int) ((1 / (double) folds) * 100);
    double roundedratio = ((double) ratioint) / 100;

    // Look for essentials
    List<String[]> essentials = this.getEssentials(originalDatasetPath, logger);

    for (int fold = 0; fold < folds; fold++) {
        String outputDirWithRatio = outputDir + "/" + roundedratio + "testRatio/iteration-" + fold;
        File dir = new File(outputDirWithRatio);
        if (!dir.exists() || !dir.isDirectory()) {
            dir.mkdirs();
        }

        logger.finer("--> splitDataset()");
        logger.fine("Creating experiment.info...");

        try {

            Instances originalData = this.getDataFromCSV(originalDatasetPath);

            originalData.randomize(new Random());
            originalData.stratify(folds);

            // TestDataSet
            Instances testData = originalData.testCV(folds, fold);
            CSVSaver saver = new CSVSaver();
            ArffSaver arffsaver = new ArffSaver();
            File file = new File(outputDirWithRatio + File.separator + "test-dataset.csv");
            if (!file.exists()) {
                saver.resetOptions();
                saver.setInstances(testData);
                saver.setFile(file);
                saver.writeBatch();
            }

            file = new File(outputDirWithRatio + File.separator + "test-dataset.arff");
            if (!file.exists()) {
                arffsaver.resetOptions();
                arffsaver.setInstances(testData);
                arffsaver.setFile(file);
                arffsaver.writeBatch();
            }

            // BayesCentralDataset
            Instances trainData = originalData.trainCV(folds, fold);
            file = new File(outputDirWithRatio + File.separator + "bayes-central-dataset.csv");
            if (!file.exists()) {
                saver.resetOptions();
                saver.setInstances(trainData);
                saver.setFile(file);
                saver.writeBatch();
                this.copyFileUsingApacheCommonsIO(file,
                        new File(
                                outputDirWithRatio + File.separator + "bayes-central-dataset-noEssentials.csv"),
                        logger);
                CsvWriter w = new CsvWriter(new FileWriter(file, true), ',');
                for (String[] essential : essentials) {
                    w.writeRecord(essential);
                }
                w.close();
            }
            file = new File(outputDirWithRatio + File.separator + "bayes-central-dataset.arff");
            if (!file.exists()) {
                arffsaver.resetOptions();
                arffsaver.setInstances(trainData);
                arffsaver.setFile(file);
                arffsaver.writeBatch();
                this.copyFileUsingApacheCommonsIO(file, new File(
                        outputDirWithRatio + File.separator + "bayes-central-dataset-noEssentials.arff"),
                        logger);
                CsvWriter w = new CsvWriter(new FileWriter(file, true), ',');
                for (String[] essential : essentials) {
                    w.writeRecord(essential);
                }
                w.close();
            }

            // Agent datasets
            CsvReader csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));
            csvreader.readHeaders();
            String[] headers = csvreader.getHeaders();
            csvreader.close();

            for (int agents = minAgents; agents <= maxAgents; agents++) {
                this.createExperimentInfoFile(folds, agents, originalDatasetPath, outputDirWithRatio, scenario,
                        logger);
                HashMap<String, CsvWriter> writers = new HashMap<String, CsvWriter>();
                String agentsDatasetsDir = outputDirWithRatio + File.separator + agents + "agents";
                HashMap<String, CsvWriter> arffWriters = new HashMap<String, CsvWriter>();
                File f = new File(agentsDatasetsDir);
                if (!f.isDirectory()) {
                    f.mkdirs();
                }
                Instances copy = new Instances(trainData);
                copy.delete();
                for (int i = 0; i < agents; i++) {
                    String fileName = agentsDatasetsDir + File.separator + "agent-" + i + "-dataset.csv";
                    file = new File(fileName);
                    if (!file.exists()) {
                        CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
                        writer.writeRecord(headers);
                        writers.put("AGENT" + i, writer);
                    }
                    fileName = agentsDatasetsDir + File.separator + "agent-" + i + "-dataset.arff";
                    file = new File(fileName);
                    if (!file.exists()) {
                        arffsaver.resetOptions();
                        arffsaver.setInstances(copy);
                        arffsaver.setFile(new File(fileName));
                        arffsaver.writeBatch();
                        CsvWriter arffwriter = new CsvWriter(new FileWriter(fileName, true), ',');
                        arffWriters.put("AGENT" + i, arffwriter);
                    }

                    logger.fine("AGENT" + i + " dataset created in csv and arff formats.");
                }
                // Append essentials to all
                for (String[] essential : essentials) {
                    for (CsvWriter wr : writers.values()) {
                        wr.writeRecord(essential);
                    }
                    for (CsvWriter arffwr : arffWriters.values()) {
                        arffwr.writeRecord(essential);
                    }
                }

                int agentCounter = 0;
                for (int j = 0; j < trainData.numInstances(); j++) {
                    Instance instance = trainData.instance(j);
                    CsvWriter writer = writers.get("AGENT" + agentCounter);
                    CsvWriter arffwriter = arffWriters.get("AGENT" + agentCounter);
                    String[] row = new String[instance.numAttributes()];
                    for (int a = 0; a < instance.numAttributes(); a++) {
                        row[a] = instance.stringValue(a);
                    }
                    if (writer != null) {
                        writer.writeRecord(row);
                    }
                    if (arffwriter != null) {
                        arffwriter.writeRecord(row);
                    }
                    agentCounter++;
                    if (agentCounter == agents) {
                        agentCounter = 0;
                    }
                }

                for (CsvWriter wr : writers.values()) {
                    wr.close();
                }
                for (CsvWriter arffwr : arffWriters.values()) {
                    arffwr.close();
                }
            }

        } catch (Exception e) {
            logger.severe("Exception while splitting dataset. ->");
            logger.severe(e.getMessage());
            System.exit(1);
        }

        logger.finest("Dataset for fold " + fold + " created.");
    }

    logger.finer("<-- splitDataset()");

}
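
Note that randomize is called with an unseeded Random above, so the fold assignment differs between runs. A fixed seed makes the stratified split reproducible; a one-line sketch (the seed value 42 is arbitrary):

// A fixed seed makes the fold assignment reproducible across runs.
originalData.randomize(new java.util.Random(42));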

From source file:org.protempa.backend.dsb.relationaldb.RelationalDbDataSourceBackend.java

private void validate(KnowledgeSource knowledgeSource)
        throws KnowledgeSourceReadException, DataSourceBackendFailedConfigurationValidationException {
    List<EntitySpec> allSpecs = Arrays.asList(this.relationalDatabaseSpecBuilder.getEventSpecs(),
            this.relationalDatabaseSpecBuilder.getConstantSpecs(),
            this.relationalDatabaseSpecBuilder.getPrimitiveParameterSpecs());

    Logger logger = SQLGenUtil.logger();
    for (EntitySpec entitySpec : allSpecs) {
        String entitySpecName = entitySpec.getName();
        logger.log(Level.FINER, "Validating entity spec {0}", entitySpecName);
        String[] propIds = entitySpec.getPropositionIds();
        Set<String> propNamesFromPropSpecs = new HashSet<>();
        PropertySpec[] propSpecs = entitySpec.getPropertySpecs();
        logger.finer("Checking for duplicate properties");
        for (PropertySpec propSpec : propSpecs) {
            String propSpecName = propSpec.getName();
            if (!propNamesFromPropSpecs.add(propSpecName)) {
                throw new DataSourceBackendFailedConfigurationValidationException(
                        "Duplicate property name " + propSpecName + " in entity spec " + entitySpecName);
            }
        }
        logger.finer("No duplicate properties found");
        logger.finer("Checking for invalid proposition ids and properties");
        Set<String> propNamesFromPropDefs = new HashSet<>();
        Set<String> invalidPropIds = new HashSet<>();
        for (String propId : propIds) {
            PropositionDefinition propDef = knowledgeSource.readPropositionDefinition(propId);
            if (propDef == null) {
                invalidPropIds.add(propId);
                continue; // skip the property checks for a missing definition
            }
            PropertyDefinition[] propertyDefs = propDef.getPropertyDefinitions();
            for (PropertyDefinition propertyDef : propertyDefs) {
                String propName = propertyDef.getId();
                propNamesFromPropDefs.add(propName);
            }
        }
        if (!invalidPropIds.isEmpty()) {
            throw new DataSourceBackendFailedConfigurationValidationException(
                    "Invalid proposition id(s) named in entity spec " + entitySpecName + ": '"
                            + StringUtils.join(invalidPropIds, "', '") + "'");
        }
        if (!propNamesFromPropSpecs.removeAll(propNamesFromPropDefs)) {
            throw new DataSourceBackendFailedConfigurationValidationException(
                    "Data model entity spec " + entitySpec.getName() + " has properties '"
                            + StringUtils.join(propNamesFromPropSpecs, "', '")
                            + "' that are not in the knowledge source's corresponding proposition definitions");
        }
        logger.finer("No invalid proposition ids or properties found");
    }
}

From source file:org.usrz.libs.logging.LevelDebugTest.java

@Test
public void testJavaLogging() {
    final java.util.logging.Logger logger = java.util.logging.Logger.getLogger(this.getClass().getName());

    logger.finest("Foobar FINEST");
    AppenderForTests.hasNoLastEvent("at Finest level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINEST));

    logger.finer("Foobar FINER");
    AppenderForTests.hasNoLastEvent("at Finer level");
    assertFalse(logger.isLoggable(java.util.logging.Level.FINER));

    logger.fine("Foobar FINE");
    AppenderForTests.hasLastEvent("at Fine level");
    assertTrue(logger.isLoggable(java.util.logging.Level.FINE));

    logger.config("Foobar CONFIG");
    AppenderForTests.hasLastEvent("at Config level");
    assertTrue(logger.isLoggable(java.util.logging.Level.CONFIG));

    logger.info("Foobar INFO");
    AppenderForTests.hasLastEvent("at Info level");
    assertTrue(logger.isLoggable(java.util.logging.Level.INFO));

    logger.warning("Foobar WARNING");
    AppenderForTests.hasLastEvent("at Warning level");
    assertTrue(logger.isLoggable(java.util.logging.Level.WARNING));

    logger.severe("Foobar SEVERE");
    AppenderForTests.hasLastEvent("at Severe level");
    assertTrue(logger.isLoggable(java.util.logging.Level.SEVERE));

}