Example usage for java.util.logging Logger log

List of usage examples for java.util.logging Logger log

Introduction

On this page you can find example usages of the java.util.logging Logger.log method.

Prototype

public void log(Level level, String msg)

Document

Log a message, with no arguments. If the logger is currently enabled for the given message level then the given message is forwarded to all the registered output Handler objects.
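
A minimal sketch of the call outside any particular project (the logger name and messages are hypothetical). The message string is built eagerly, so for messages that are expensive to construct, an isLoggable guard or the log(Level, Supplier&lt;String&gt;) overload avoids doing the work when the level is disabled:

import java.util.logging.Level;
import java.util.logging.Logger;

public class LogExample {
    private static final Logger LOGGER = Logger.getLogger(LogExample.class.getName());

    public static void main(String[] args) {
        // Plain form: the message is concatenated whether or not INFO is enabled.
        LOGGER.log(Level.INFO, "Application started at " + System.currentTimeMillis());

        // Guarded form: skip building the message when FINE is disabled.
        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.log(Level.FINE, "Detailed state: " + buildExpensiveReport());
        }

        // Supplier overload (Java 8+): the lambda runs only if FINE is enabled.
        LOGGER.log(Level.FINE, () -> "Detailed state: " + buildExpensiveReport());
    }

    private static String buildExpensiveReport() {
        return "(expensive diagnostic output)";
    }
}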

Usage

From source file:org.apache.oodt.cas.pge.staging.FileStager.java

public void stageFiles(FileStagingInfo fileStagingInfo, PgeMetadata pgeMetadata, Logger logger)
        throws PGEException, CatalogException, URISyntaxException, IOException, ConnectionException,
        InstantiationException, DataTransferException {
    logger.log(Level.INFO, "Creating staging directory [" + fileStagingInfo.getStagingDir() + "]");
    new File(fileStagingInfo.getStagingDir()).mkdirs();
    for (String file : fileStagingInfo.getFilePaths()) {
        File fileHandle = new File(file);
        if (fileStagingInfo.isForceStaging() || !fileHandle.exists()) {
            logger.log(Level.INFO,
                    "Staging file [" + file + "] to directory [" + fileStagingInfo.getStagingDir() + "]");
            stageFile(asURI(file), new File(fileStagingInfo.getStagingDir()), pgeMetadata, logger);
        }
    }
    if (!fileStagingInfo.getProductIds().isEmpty()) {
        XmlRpcFileManagerClient fmClient = createFileManagerClient(pgeMetadata);
        for (String productId : fileStagingInfo.getProductIds()) {
            logger.log(Level.INFO, "Staging product [" + productId + "] to directory ["
                    + fileStagingInfo.getStagingDir() + "]");
            for (URI uri : getProductReferences(productId, fmClient)) {
                logger.log(Level.INFO, "Staging product [" + productId + "] reference [" + uri
                        + "] to directory [" + fileStagingInfo.getStagingDir() + "]");
                stageFile(uri, new File(fileStagingInfo.getStagingDir()), pgeMetadata, logger);
            }
        }
    }
}

From source file:org.kawanfw.test.api.server.config.TestSqlConfigurator.java

/**
 * The event will be logged as <code>Level.WARNING</code> in the
 * <code>user.home/.kawansoft/log/kawanfw.log</code> file
 */
@Override
public void runIfStatementRefused(String username, Connection connection, String ipAddress, String sql,
        List<Object> parameterValues) throws IOException, SQLException {

    Logger logger = new DefaultCommonsConfigurator().getLogger();

    logger.log(Level.WARNING, "In TestSqlConfigurator: Client " + username + "(IP: " + ipAddress
            + ") has been denied executing sql statement: " + sql + " with parameters: " + parameterValues);

}

From source file:org.apache.tika.parser.geo.topic.GeoNameResolver.java

/**
 * Build the gazetteer index line by line
 *
 * @param GAZETTEER_PATH
 *            path of the gazetteer file
 * @throws IOException
 * @throws RuntimeException
 */
public void buildIndex(String GAZETTEER_PATH) throws IOException {
    File indexfile = new File(INDEXDIR_PATH);
    indexDir = FSDirectory.open(indexfile.toPath());
    if (!DirectoryReader.indexExists(indexDir)) {
        IndexWriterConfig config = new IndexWriterConfig(analyzer);
        indexWriter = new IndexWriter(indexDir, config);
        Logger logger = Logger.getLogger(this.getClass().getName());
        logger.log(Level.WARNING, "Start Building Index for Gazatteer");
        BufferedReader filereader = new BufferedReader(
                new InputStreamReader(new FileInputStream(GAZETTEER_PATH), "UTF-8"));
        String line;
        int count = 0;
        while ((line = filereader.readLine()) != null) {
            try {
                count += 1;
                if (count % 100000 == 0) {
                    logger.log(Level.INFO, "Indexed Row Count: " + count);
                }
                addDoc(indexWriter, line);

            } catch (RuntimeException re) {
                logger.log(Level.WARNING, "Skipping... Error on line: {}", line);
            }
        }
        logger.log(Level.WARNING, "Building Finished");
        filereader.close();
        indexWriter.close();
    }
}
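
A side note on the parameterized call in the catch block above: java.util.logging formats messages with java.text.MessageFormat-style indexed placeholders such as {0}, not SLF4J-style {} markers. A minimal sketch (logger name and line content hypothetical):

Logger logger = Logger.getLogger("gazetteer.indexing");
String line = "bad\trecord";
// {0} is replaced by the single Object parameter when the record is formatted.
logger.log(Level.WARNING, "Skipping... Error on line: {0}", line);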

From source file:streamcruncher.innards.db.DatabaseInterface.java

public void stop() throws Exception {
    if (privateVolatileInstance == true) {
        sentinelConnectionKeeper.cancel();

        /*
         * Hold on to this until the very end. Otherwise, the In-mem DBs
         * will shutdown.
         */
        Helper.closeConnection(sentinelConnection);
        sentinelConnection = null;
    }

    dataSource.close();
    dataSource = null;

    // ---------------------

    Logger logger = Registry.getImplFor(LoggerManager.class).getLogger(DatabaseInterface.class.getName());
    logger.log(Level.INFO, "Stopped");
}

From source file:streamcruncher.innards.db.DatabaseInterface.java

/**
 * {@inheritDoc} <code>params</code> requires the first parameter to be a
 * {@link java.util.Properties} object loaded with the necessary Database
 * properties.
 */
public void start(Object... params) throws Exception {
    this.properties = (Properties) params[0];

    String driver = properties.getProperty(ConfigKeys.DB.DRIVER_CLASS_NAME);
    String url = properties.getProperty(ConfigKeys.DB.DRIVER_URL);
    String user = properties.getProperty(ConfigKeys.DB.USER);
    String password = properties.getProperty(ConfigKeys.DB.PASSWORD);

    Class.forName(driver);

    // ----------------------

    schema = properties.getProperty(ConfigKeys.DB.SCHEMA);
    if (schema != null && schema.length() == 0) {
        schema = null;
    }

    String preservesArtifactsStr = properties.getProperty(ConfigKeys.DB.PRESERVES_ARTIFACTS_ON_SHUTDOWN);
    preservesArtifactsOnShutdown = Boolean.parseBoolean(preservesArtifactsStr);

    String maxPoolSizeStr = properties.getProperty(ConfigKeys.DB.CONNECTION_POOL_MAX_SIZE);
    int maxPoolSize = Integer.parseInt(maxPoolSizeStr);

    String privateVolatileInstanceStr = properties.getProperty(ConfigKeys.DB.PRIVATE_VOLATILE_INSTANCE);
    privateVolatileInstance = Boolean.parseBoolean(privateVolatileInstanceStr);

    dataSource = new BasicDataSource();
    dataSource.setDriverClassName(driver);
    dataSource.setUrl(url);
    dataSource.setUsername(user);
    dataSource.setPassword(password);
    dataSource.setMaxActive(maxPoolSize);
    dataSource.setMaxIdle(maxPoolSize);
    dataSource.setTestOnBorrow(false);
    dataSource.setTestOnReturn(false);
    dataSource.setTimeBetweenEvictionRunsMillis(2 * 60 * 1000);
    dataSource.setMinEvictableIdleTimeMillis(30 * 1000);
    dataSource.setAccessToUnderlyingConnectionAllowed(true);
    dataSource.setPoolPreparedStatements(true);

    setupLastStandingConnection();

    // ----------------------

    Logger logger = Registry.getImplFor(LoggerManager.class).getLogger(DatabaseInterface.class.getName());
    logger.log(Level.INFO, "Started");
}
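
The javadoc above requires params[0] to be a Properties object. A hedged sketch of a caller that builds one; the concrete values (the H2 driver, pool size, and so on) are assumptions for illustration:

void startDatabase(DatabaseInterface databaseInterface) throws Exception {
    Properties props = new Properties();
    // Keys are the ConfigKeys.DB constants read in start(); the values here are hypothetical.
    props.setProperty(ConfigKeys.DB.DRIVER_CLASS_NAME, "org.h2.Driver");
    props.setProperty(ConfigKeys.DB.DRIVER_URL, "jdbc:h2:mem:stream");
    props.setProperty(ConfigKeys.DB.USER, "sa");
    props.setProperty(ConfigKeys.DB.PASSWORD, "");
    props.setProperty(ConfigKeys.DB.SCHEMA, "PUBLIC");
    props.setProperty(ConfigKeys.DB.PRESERVES_ARTIFACTS_ON_SHUTDOWN, "false");
    props.setProperty(ConfigKeys.DB.CONNECTION_POOL_MAX_SIZE, "10");
    props.setProperty(ConfigKeys.DB.PRIVATE_VOLATILE_INSTANCE, "true");

    databaseInterface.start(props);
}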

From source file:org.apache.tika.parser.geo.topic.GeoNameResolver.java

/**
 * Search corresponding GeoName for each location entity
 *
 * @param locationNameEntities
 *            the location name entities extracted by NER
 * @return HashMap mapping each name to a list of resolved entities
 * @throws IOException
 * @throws RuntimeException
 */

public HashMap<String, ArrayList<String>> searchGeoName(ArrayList<String> locationNameEntities)
        throws IOException {

    if (locationNameEntities.size() == 0 || locationNameEntities.get(0).length() == 0)
        return new HashMap<String, ArrayList<String>>();

    Logger logger = Logger.getLogger(this.getClass().getName());

    if (!DirectoryReader.indexExists(indexDir)) {
        logger.log(Level.SEVERE, "No Lucene index directory found. Invoke buildIndex() first!");
        System.exit(1);
    }

    IndexReader reader = DirectoryReader.open(indexDir);

    if (locationNameEntities.size() >= 200)
        hitsPerPage = 5; // avoid heavy computation
    IndexSearcher searcher = new IndexSearcher(reader);

    Query q = null;

    HashMap<String, ArrayList<ArrayList<String>>> allCandidates = new HashMap<String, ArrayList<ArrayList<String>>>();

    for (String name : locationNameEntities) {

        if (!allCandidates.containsKey(name)) {
            try {
                // q = new QueryParser("name", analyzer).parse(name);
                q = new MultiFieldQueryParser(new String[] { "name", "alternatenames" }, analyzer).parse(name);
                TopScoreDocCollector collector = TopScoreDocCollector.create(hitsPerPage);
                searcher.search(q, collector);
                ScoreDoc[] hits = collector.topDocs().scoreDocs;
                ArrayList<ArrayList<String>> topHits = new ArrayList<ArrayList<String>>();

                for (int i = 0; i < hits.length; ++i) {
                    ArrayList<String> tmp1 = new ArrayList<String>();
                    ArrayList<String> tmp2 = new ArrayList<String>();
                    int docId = hits[i].doc;
                    Document d;
                    try {
                        d = searcher.doc(docId);
                        tmp1.add(d.get("name"));
                        tmp1.add(d.get("longitude"));
                        tmp1.add(d.get("latitude"));
                        if (!d.get("alternatenames").equalsIgnoreCase(d.get("name"))) {
                            tmp2.add(d.get("alternatenames"));
                            tmp2.add(d.get("longitude"));
                            tmp2.add(d.get("latitude"));
                        }
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                    topHits.add(tmp1);
                    if (tmp2.size() != 0)
                        topHits.add(tmp2);
                }
                allCandidates.put(name, topHits);
            } catch (org.apache.lucene.queryparser.classic.ParseException e) {
                e.printStackTrace();
            }
        }
    }

    HashMap<String, ArrayList<String>> resolvedEntities = new HashMap<String, ArrayList<String>>();
    pickBestCandidates(resolvedEntities, allCandidates);
    reader.close();

    return resolvedEntities;

}
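
A hedged usage sketch tying together the two GeoNameResolver methods shown on this page; the no-arg constructor and the gazetteer path are assumptions:

GeoNameResolver resolver = new GeoNameResolver();
resolver.buildIndex("/data/geonames/allCountries.txt"); // hypothetical path

ArrayList<String> locations = new ArrayList<String>(Arrays.asList("Paris", "Pasadena"));
HashMap<String, ArrayList<String>> resolved = resolver.searchGeoName(locations);
for (Map.Entry<String, ArrayList<String>> entry : resolved.entrySet()) {
    System.out.println(entry.getKey() + " -> " + entry.getValue());
}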

From source file:org.openspaces.focalserver.FocalServer.java

/**
 * Logs registration events from the local mbeanserver
 */
public void handleNotification(Notification notification, Object object) {
    if (notification instanceof MBeanServerNotification) {
        Logger logger = Logger.getLogger(FocalServer.class.getName());
        MBeanServerNotification mBeanServerNotification = (MBeanServerNotification) notification;

        if (mBeanServerNotification.getType().equals(MBeanServerNotification.REGISTRATION_NOTIFICATION)) {
            ObjectName beanName = mBeanServerNotification.getMBeanName();
            logger.log(Level.FINE, "Registered:" + beanName);
        } else {
            logger.log(Level.FINE, "Unregistered:" + mBeanServerNotification.getMBeanName());
        }
    }
}
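
For context, a listener like this receives registration events only after it is attached to the MBean server delegate. A minimal sketch, assuming FocalServer implements javax.management.NotificationListener (as the signature above suggests) and has a no-arg constructor:

MBeanServer mbs = ManagementFactory.getPlatformMBeanServer();
FocalServer focalServer = new FocalServer(); // assumption: no-arg constructor
mbs.addNotificationListener(MBeanServerDelegate.DELEGATE_NAME, focalServer, null, null);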

From source file:org.jdesktop.wonderland.modules.service.InstallManager.java

/**
 * Removes an existing module, given its name. 
 */
public void remove(String moduleName) {
    Logger logger = ModuleManager.getLogger();

    /*
     * Simply delete the directory associated with the module (quietly) and
     * remove from the list
     */
    File file = new File(this.installedFile, moduleName);
    try {
        FileUtils.deleteDirectory(file);
    } catch (IOException excp) {
        /* Log an error and continue */
        logger.log(Level.WARNING, "[MODULES] installed Failed to remove " + file.getAbsolutePath());
    }
    this.installedModules.remove(moduleName);
}

From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java

private void logBeginOaiHarvest(Logger hdLogger, HarvestingClient harvestingClient) {
    hdLogger.log(Level.INFO, "BEGIN HARVEST, oaiUrl=" + harvestingClient.getHarvestingUrl() + ",set="
            + harvestingClient.getHarvestingSet() + ", metadataPrefix=" + harvestingClient.getMetadataPrefix()
            + harvestingClient.getLastNonEmptyHarvestTime() == null ? ""
                    : "from=" + harvestingClient.getLastNonEmptyHarvestTime());
}

From source file:edu.harvard.iq.dataverse.harvest.client.HarvesterServiceBean.java

private void logCompletedOaiHarvest(Logger hdLogger, HarvestingClient harvestingClient) {
    hdLogger.log(Level.INFO, "COMPLETED HARVEST, oaiUrl=" + harvestingClient.getHarvestingUrl() + ",set="
            + harvestingClient.getHarvestingSet() + ", metadataPrefix=" + harvestingClient.getMetadataPrefix()
            + harvestingClient.getLastNonEmptyHarvestTime() == null ? ""
                    : "from=" + harvestingClient.getLastNonEmptyHarvestTime());
}