Example usage for java.util.logging Logger fine

List of usage examples for java.util.logging Logger fine

Introduction

On this page you can find example usage of java.util.logging Logger.fine.

Prototype

public void fine(Supplier<String> msgSupplier) 

Source Link

Document

Log a FINE message, which is only to be constructed if the logging level is such that the message will actually be logged.
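
Before the usage listing, a minimal self-contained sketch of the Supplier overload itself; the class, method and message names here are illustrative only and are not taken from the examples below.

import java.util.logging.Logger;

public class FineSupplierExample {

    private static final Logger LOGGER = Logger.getLogger(FineSupplierExample.class.getName());

    void handleRequest(String assetId) {
        // The lambda is evaluated only if FINE is currently loggable,
        // so the string concatenation is skipped at higher levels.
        LOGGER.fine(() -> "handleRequest called with assetId: " + assetId);
    }
}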

Usage

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testGetAssetReviews(@Mocked final Logger logger, @Mocked final SecurityContext sc)
        throws InvalidIdException, NonExistentArtefactException {

    new Expectations() {
        {
            logger.isLoggable(Level.FINE);
            result = true;
            logger.fine("getAssetReviews called with id of 'ffffffffffffffffffffffff'");
            sc.isUserInRole("Administrator");
            result = true;
        }
    };

    getRestResource().getAssetReviews(NON_EXISTENT_ID, dummyUriInfo, sc);
}
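
The expectation above stubs isLoggable(Level.FINE) to true before recording the fine(String) call, which suggests the resource guards message construction explicitly. Below is a hedged sketch of that guard next to the Supplier overload; the method body is an assumption for illustration, not the actual RepositoryRESTResource code.

import java.util.logging.Level;
import java.util.logging.Logger;

public class GuardedFineSketch {

    private static final Logger logger = Logger.getLogger(GuardedFineSketch.class.getName());

    void getAssetReviews(String assetId) {
        // Explicit guard, as the recorded isLoggable(Level.FINE) expectation implies:
        if (logger.isLoggable(Level.FINE)) {
            logger.fine("getAssetReviews called with id of '" + assetId + "'");
        }

        // Supplier overload: no explicit guard, the lambda runs only when
        // the message would actually be logged.
        logger.fine(() -> "getAssetReviews called with id of '" + assetId + "'");
    }
}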

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testGetAttachmentContent(@Mocked final Logger logger, @Mocked final SecurityContext sc)
        throws InvalidIdException, NonExistentArtefactException {

    new Expectations() {
        {
            logger.isLoggable(Level.FINE);
            result = true;

            logger.fine("getAttachmentContent called for assetId: " + NON_EXISTENT_ID + " attachmentId: "
                    + NON_EXISTENT_ID + " name: " + "no_name");
            sc.isUserInRole("Administrator");
            result = true;
        }
    };

    getRestResource().getAttachmentContent(NON_EXISTENT_ID, NON_EXISTENT_ID, "no_name", dummyUriInfo, sc);
}

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testCreateAttachmentNoContent(@Mocked final Logger logger) throws InvalidJsonAssetException,
        InvalidIdException, AssetPersistenceException, NonExistentArtefactException {

    new Expectations() {
        {
            logger.isLoggable(Level.FINE);
            result = true;

            logger.fine("createAttachmentNoContent called, name: " + "name" + " assetId: " + NON_EXISTENT_ID
                    + " json content:\n" + "{}");
        }
    };

    getRestResource().createAttachmentNoContent("name", NON_EXISTENT_ID, null, "{}", dummyUriInfo);
}

From source file:jp.ikedam.jenkins.plugins.ldap_sasl.SearchUserDnResolver.java

/**
 * Resolve the user DN by querying the LDAP directory.
 *
 * @param ctx LDAP context, already authenticated.
 * @param username the username the user authenticated with.
 * 
 * @return the DN of the user.
 * @see jp.ikedam.jenkins.plugins.ldap_sasl.UserDnResolver#getUserDn(javax.naming.ldap.LdapContext, java.lang.String)
 */
@Override
public String getUserDn(LdapContext ctx, String username) {
    Logger logger = getLogger();
    if (StringUtils.isBlank(getSearchQueryTemplate())) {
        // not configured.
        logger.severe("Not configured.");

        return null;
    }

    try {
        SearchControls searchControls = new SearchControls();
        searchControls.setSearchScope(SearchControls.SUBTREE_SCOPE);
        logger.fine(String.format("Searching users base=%s, username=%s", getSearchBase(), username));
        String query = expandUsername(getSearchQueryTemplate(), username);
        NamingEnumeration<SearchResult> entries = ctx.search((getSearchBase() != null) ? getSearchBase() : "",
                query, searchControls);
        if (!entries.hasMore()) {
            // no entry.
            logger.severe(String.format("User not found: %s", username));
            return null;
        }

        String userDn = entries.next().getNameInNamespace();

        if (entries.hasMore()) {
            // more than one entry.
            logger.severe(String.format("User found more than one: %s", username));
            return null;
        }
        entries.close();

        return userDn;
    } catch (NamingException e) {
        logger.log(Level.SEVERE, "Failed to search a user", e);
        return null;
    }
}
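
The method above builds its FINE message eagerly with String.format before calling fine(String). A minimal sketch of two ways to defer that work; this is not part of the original Jenkins plugin source, and the class and method names are illustrative.

import java.util.logging.Level;
import java.util.logging.Logger;

public class DeferredFineSketch {

    private static final Logger logger = Logger.getLogger(DeferredFineSketch.class.getName());

    void traceSearch(String searchBase, String username) {
        // Supplier overload: String.format only runs when FINE is enabled.
        logger.fine(() -> String.format("Searching users base=%s, username=%s", searchBase, username));

        // MessageFormat-style parameters: formatting is deferred until a
        // handler actually publishes the record.
        logger.log(Level.FINE, "Searching users base={0}, username={1}",
                new Object[] { searchBase, username });
    }
}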

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testUpdateAssetState(@Mocked final Logger logger)
        throws NonExistentArtefactException, RepositoryResourceLifecycleException {

    final String updateJson = "{\"action\":\"publish\"}";

    new Expectations() {
        {
            logger.isLoggable(Level.FINE);
            result = true;

            logger.fine("updateAssetState called for assetId: " + NON_EXISTENT_ID + " action: " + updateJson);
        }
    };

    getRestResource().updateAssetState(NON_EXISTENT_ID, updateJson);
}

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testCreateAttachmentWithContent(@Mocked final Logger logger,
        @Mocked final BufferedInMultiPart inMultiPart) throws InvalidJsonAssetException, InvalidIdException,
        AssetPersistenceException, NonExistentArtefactException {

    new Expectations() {
        {
            logger.isLoggable(Level.FINE);
            result = true;

            logger.fine("createAttachmentWithContent called, name: " + "name" + " assetId: " + NON_EXISTENT_ID);
        }
    };

    getRestResource().createAttachmentWithContent("name", NON_EXISTENT_ID, null, inMultiPart, dummyUriInfo);
}

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testCountAssets(@Mocked final Logger logger, @Mocked final UriInfo info, @Mocked SecurityContext sc)
        throws URISyntaxException, InvalidParameterException {

    new Expectations() {
        {
            info.getQueryParameters(false);

            logger.isLoggable(Level.FINE);
            result = true;

            info.getRequestUri();
            result = new URI("http://localhost:9085/ma/v1/assets?foo=bar");

            logger.fine("countAssets called with query parameters: foo=bar");
        }
    };

    getRestResource().countAssets(info, sc);
}

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testGetAssets(@Mocked final Logger logger, @Mocked final UriInfo info,
        @Mocked SecurityContext context)
        throws URISyntaxException, JsonProcessingException, InvalidParameterException {

    new Expectations() {
        {
            info.getQueryParameters(false);

            logger.isLoggable(Level.FINE);
            result = true;

            info.getRequestUri();
            result = new URI("http://localhost:9085/ma/v1/assets?foo=bar");

            logger.fine("getAssets called with query parameters: foo=bar");
        }
    };

    getRestResource().getAssets(info, context);
}

From source file:com.ibm.ws.lars.rest.RepositoryRESTResourceLoggingTest.java

@Test
public void testGetAssetFieldSummary(@Mocked final Logger logger, @Mocked final UriInfo info,
        @Mocked SecurityContext sc)
        throws URISyntaxException, JsonProcessingException, InvalidParameterException {

    final MultivaluedMapImpl<String, String> parameters = new MultivaluedMapImpl<>();
    parameters.add("fields", "xyz");

    new Expectations() {
        {
            info.getQueryParameters(false);
            result = parameters;

            logger.isLoggable(Level.FINE);
            result = true;

            info.getRequestUri();
            result = new URI("http://localhost:9085/ma/v1/assets/summary?fields=xyz");

            logger.fine("getAssetFieldSummary called with query parameters: fields=xyz");
        }
    };

    getRestResource().getAssetFieldSummary(info, sc);

}

From source file:es.upm.dit.gsi.barmas.dataset.utils.DatasetSplitter.java

/**
 * This method splits the original dataset in many small datasets for a
 * given number of agents.
 * 
 * @param ratio
 *            0 < ratio < 1 (typically 0.3 or 0.4); the fraction of the
 *            original data used to build the test dataset
 * @param agents
 *            number of agents to split the original dataset
 * @param originalDatasetPath
 * @param outputDir
 * @param central
 *            true to create a bayes central dataset that joins all agent
 *            data
 * @param scenario
 * @param iteration
 * @throws Exception
 */
public void splitDataset(double ratio, int agents, String originalDatasetPath, String outputDir,
        boolean central, String scenario, Logger logger, int iteration) {

    int ratioint = (int) (ratio * 100);
    double roundedratio = ((double) ratioint) / 100;
    String outputDirWithRatio = outputDir + "/" + roundedratio + "testRatio/iteration-" + iteration;
    File dir = new File(outputDirWithRatio);
    if (!dir.exists() || !dir.isDirectory()) {
        dir.mkdirs();
    }

    logger.finer("--> splitDataset()");
    logger.fine("Creating experiment.info...");
    this.createExperimentInfoFile(ratio, agents, originalDatasetPath, outputDirWithRatio, central, scenario,
            logger);

    try {
        // Look for essentials
        List<String[]> essentials = this.getEssentials(originalDatasetPath, logger);

        HashMap<String, CsvWriter> writers = new HashMap<String, CsvWriter>();
        CsvReader csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));

        csvreader.readHeaders();
        String[] headers = csvreader.getHeaders();
        int originalDatasetRowsCounter = 0;
        while (csvreader.readRecord()) {
            originalDatasetRowsCounter++;
        }
        csvreader.close();

        // Create datasets files

        // Central dataset
        if (central) {
            String fileName = outputDirWithRatio + File.separator + "bayes-central-dataset.csv";
            CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
            writer.writeRecord(headers);
            writers.put("CENTRAL", writer);
            for (String[] essential : essentials) {
                writer.writeRecord(essential);
            }
            logger.fine("Bayes central dataset created.");
        }

        // Agent datasets
        String agentsDatasetsDir = outputDirWithRatio + File.separator + agents + "agents";
        File f = new File(agentsDatasetsDir);
        if (!f.isDirectory()) {
            f.mkdirs();
        }
        for (int i = 0; i < agents; i++) {
            String fileName = agentsDatasetsDir + File.separator + "agent-" + i + "-dataset.csv";
            CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
            writer.writeRecord(headers);
            for (String[] essential : essentials) {
                writer.writeRecord(essential);
            }
            writers.put("AGENT" + i, writer);
            logger.fine("AGENT" + i + " dataset created.");
        }

        // Test dataset
        String fileName = outputDirWithRatio + File.separator + "test-dataset.csv";
        CsvWriter writer = new CsvWriter(new FileWriter(fileName), ',');
        writer.writeRecord(headers);
        writers.put("TEST", writer);
        logger.fine("Test dataset created.");

        // Create an ordering queue
        int testCases = (int) (ratio * originalDatasetRowsCounter);
        int testStep = originalDatasetRowsCounter / testCases;

        csvreader = new CsvReader(new FileReader(new File(originalDatasetPath)));

        csvreader.readHeaders();
        int stepCounter = 0 - (iteration % testStep);
        int agentCounter = 0;
        while (csvreader.readRecord()) {
            String[] row = csvreader.getValues();
            if (stepCounter % testStep == 0) {
                writer = writers.get("TEST");
                writer.writeRecord(row);
            } else {
                writer = writers.get("AGENT" + agentCounter);
                writer.writeRecord(row);
                writer = writers.get("CENTRAL");
                writer.writeRecord(row);
                agentCounter++;
                if (agentCounter == agents) {
                    agentCounter = 0;
                }
            }
            stepCounter++;
        }

        csvreader.close();
        for (CsvWriter w : writers.values()) {
            w.close();
        }

    } catch (Exception e) {
        logger.severe("Exception while splitting dataset. ->");
        logger.severe(e.getMessage());
        System.exit(1);
    }

    logger.finer("<-- splitDataset()");
}
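
splitDataset() above reports its progress with fine() and its entry and exit with finer(). Under the default java.util.logging configuration those records are discarded, so here is a minimal sketch, not part of the original project, of a setup under which they would actually appear; the logger name is an assumption.

import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FineLoggingSetup {

    // Both the logger and its handler default to levels above FINE, so
    // both must be lowered for FINE and FINER records to be published.
    static Logger configureFineLogging(String name) {
        Logger logger = Logger.getLogger(name);
        logger.setLevel(Level.FINER);
        Handler handler = new ConsoleHandler();
        handler.setLevel(Level.FINER);
        logger.addHandler(handler);
        return logger;
    }
}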