Example usage for java.util.logging Level FINEST

Introduction

On this page you can find usage examples for java.util.logging Level FINEST.

Prototype

public static final Level FINEST

Document

FINEST indicates a highly detailed tracing message.
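
Before the real-world examples below, here is a minimal, self-contained sketch (the class name and the expensiveDump helper are illustrative, not taken from the examples) showing the two common ways FINEST is used: the parameterized log overload, and an isLoggable guard around expensive message construction. Note that FINEST sits below the default INFO threshold, so both the logger and a handler must be lowered before any output appears.

import java.util.logging.ConsoleHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class FinestExample {
    private static final Logger LOG = Logger.getLogger(FinestExample.class.getName());

    public static void main(String[] args) {
        // Lower both the logger and a dedicated handler to FINEST;
        // the default console handler only passes INFO and above.
        Handler handler = new ConsoleHandler();
        handler.setLevel(Level.FINEST);
        LOG.addHandler(handler);
        LOG.setUseParentHandlers(false);
        LOG.setLevel(Level.FINEST);

        // Parameterized form: the message is only formatted if FINEST is enabled.
        LOG.log(Level.FINEST, "Processing item {0} of {1}", new Object[] { 3, 10 });

        // Guard string concatenation so the work is skipped when FINEST is disabled.
        if (LOG.isLoggable(Level.FINEST)) {
            LOG.finest("State dump: " + expensiveDump());
        }
    }

    // Illustrative stand-in for a costly toString()/dump operation.
    private static String expensiveDump() {
        return "...";
    }
}

The guard-then-log pattern in the sketch is the same one that recurs throughout the examples below.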

Usage

From source file:com.archivas.clienttools.arcutils.model.ArcMoverDirectory.java

public List<ArcMoverFile> getFileList(int maxFiles) throws StorageAdapterException, JobException {
    badElementCnt = 0;
    int count = 0;
    ArrayList<ArcMoverFile> files = new ArrayList<ArcMoverFile>();
    Iterator<ArcMoverFile> iter = getFileListIterator(true, profile.supportsVersioning());

    while (iter.hasNext() && count++ < maxFiles) {
        try {
            ArcMoverFile f = iter.next();
            assert (f != null); // Expecting the iterator to throw NoSuchElementException
                                // instead of returning null.
            files.add(f);
        } catch (NoSuchElementException e) {
            // decrement count
            --count;
            // This can happen if the adapter sees things it can't deal with, e.g., symlinks on
            // an LFS.
            LOG.log(Level.FINEST, "Unsupported object encountered, skipping.", e);
        } catch (BadElementException bee) {
            // we will not include this element, but other elements should be included
            --count;
            badElementCnt++;
            LOG.log(Level.FINEST, "Bad object encountered, skipping.", bee);
        }
    }

    return files;
}

From source file:be.fedict.eidviewer.lib.file.imports.Version35XMLFile.java

@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
        throws SAXException {
    logger.log(Level.FINEST, "<{0}>", localName);
    if (localName.equalsIgnoreCase(STAGE.BIOGRAPHIC.getState()))
        stage = STAGE.BIOGRAPHIC;
    else if (localName.equalsIgnoreCase(STAGE.BIOMETRIC.getState()))
        stage = STAGE.BIOMETRIC;
    else if (localName.equalsIgnoreCase(STAGE.BIOMETRIC_PICTURE.getState()))
        stage = STAGE.BIOMETRIC_PICTURE;
    else if (localName.equalsIgnoreCase(STAGE.CRYPTOGRAPHIC.getState()))
        stage = STAGE.CRYPTOGRAPHIC;
}

From source file:hudson.plugins.plot.CSVSeries.java

/**
 * This function loads the set of columns that should be included or
 * excluded.
 */
private void loadExclusionSet() {
    if (inclusionFlag == InclusionFlag.OFF)
        return;

    if (exclusionValues == null) {
        inclusionFlag = InclusionFlag.OFF;
        return;
    }

    switch (inclusionFlag) {
    case INCLUDE_BY_STRING:
    case EXCLUDE_BY_STRING:
        strExclusionSet = new HashSet<String>();
        break;

    case INCLUDE_BY_COLUMN:
    case EXCLUDE_BY_COLUMN:
        colExclusionSet = new HashSet<Integer>();
        break;
    }

    for (String str : PAT_COMMA.split(exclusionValues)) {
        if (str == null || str.length() <= 0)
            continue;

        switch (inclusionFlag) {
        case INCLUDE_BY_STRING:
        case EXCLUDE_BY_STRING:
            if (LOGGER.isLoggable(Level.FINEST))
                LOGGER.finest(inclusionFlag + " CSV Column: " + str);
            strExclusionSet.add(str);
            break;

        case INCLUDE_BY_COLUMN:
        case EXCLUDE_BY_COLUMN:
            try {
                if (LOGGER.isLoggable(Level.FINEST))
                    LOGGER.finest(inclusionFlag + " CSV Column: " + str);
                colExclusionSet.add(Integer.valueOf(str));
            } catch (NumberFormatException nfe) {
                LOGGER.log(Level.SEVERE, "Exception converting to integer", nfe);
            }
            break;
        }
    }
}

From source file:org.apache.myfaces.ov2021.application.jsp.JspStateManagerImpl.java

/**
 * Return an object which contains info about the UIComponent type
 * of each node in the view tree. This allows an identical UIComponent
 * tree to be recreated later, though all the components will have
 * just default values for their members.
 */
@Override
protected Object getTreeStructureToSave(FacesContext facesContext) {
    if (log.isLoggable(Level.FINEST))
        log.finest("Entering getTreeStructureToSave");
    UIViewRoot viewRoot = facesContext.getViewRoot();
    if (viewRoot.isTransient()) {
        return null;
    }
    TreeStructureManager tsm = new TreeStructureManager();
    Object retVal = tsm.buildTreeStructureToSave(viewRoot);
    if (log.isLoggable(Level.FINEST))
        log.finest("Exiting getTreeStructureToSave");
    return retVal;
}

From source file:com.ibm.jaggr.core.impl.layer.LayerImpl.java

@SuppressWarnings("unchecked")
@Override
public InputStream getInputStream(HttpServletRequest request, HttpServletResponse response) throws IOException {

    CacheEntry entry = null;
    String key = null;
    IAggregator aggr = (IAggregator) request.getAttribute(IAggregator.AGGREGATOR_REQATTRNAME);
    List<String> cacheInfoReport = null;
    if (_isReportCacheInfo) {
        cacheInfoReport = (List<String>) request.getAttribute(LAYERCACHEINFO_PROPNAME);
        if (cacheInfoReport != null) {
            cacheInfoReport.clear();
        }
    }
    if (log.isLoggable(Level.FINEST) && cacheInfoReport == null) {
        cacheInfoReport = new LinkedList<String>();
    }
    try {
        IOptions options = aggr.getOptions();
        ICacheManager mgr = aggr.getCacheManager();
        boolean ignoreCached = RequestUtil.isIgnoreCached(request);
        InputStream result;
        long lastModified = getLastModified(request);
        CacheEntry newEntry = new CacheEntry(_id, _cacheKey, lastModified);
        CacheEntry existingEntry = null;

        if (ignoreCached) {
            request.setAttribute(NOCACHE_RESPONSE_REQATTRNAME, Boolean.TRUE);
        }
        if (options.isDevelopmentMode()) {
            synchronized (this) {
                // See if we need to discard previously built LayerBuilds
                if (lastModified > _lastModified) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_lastmod2"); //$NON-NLS-1$
                    }
                    if (lastModified != Long.MAX_VALUE) {
                        // max value means missing requested source
                        _lastModified = lastModified;
                    }
                    _cacheKeyGenerators = null;
                }
            }
        }
        Map<String, ICacheKeyGenerator> cacheKeyGenerators = _cacheKeyGenerators;

        // Create a cache key.
        key = generateCacheKey(request, cacheKeyGenerators);

        if (!ignoreCached && key != null) {
            int loopGuard = 5;
            do {
                // Try to retrieve an existing layer build using the blocking putIfAbsent.  If the return
                // value is null, then the newEntry was successfully added to the map, otherwise the
                // existing entry is returned in the buildReader and newEntry was not added.
                existingEntry = _layerBuilds.putIfAbsent(key, newEntry, options.isDevelopmentMode());
                if (cacheInfoReport != null) {
                    cacheInfoReport.add(existingEntry != null ? "hit_1" : "added"); //$NON-NLS-1$ //$NON-NLS-2$
                }
                if (existingEntry != null) {
                    if ((result = existingEntry.tryGetInputStream(request)) != null) {
                        setResponseHeaders(request, response, existingEntry.getSize());
                        if (log.isLoggable(Level.FINEST)) {
                            log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                                    "key:" + key + //$NON-NLS-1$
                                    "\n" + existingEntry.toString()); //$NON-NLS-1$
                        }
                        if (_isReportCacheInfo) {
                            request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
                        }
                        return result;
                    } else if (existingEntry.isDeleted()) {
                        if (_layerBuilds.replace(key, existingEntry, newEntry)) {
                            // entry was replaced, use newEntry
                            if (cacheInfoReport != null) {
                                cacheInfoReport.add("replace_1"); //$NON-NLS-1$
                            }
                            existingEntry = null;
                        } else {
                            // Existing entry was removed from the cache by another thread
                            // between the time we retrieved it and the time we tried to
                            // replace it.  Try to add the new entry again.
                            if (cacheInfoReport != null) {
                                cacheInfoReport.add("retry_add"); //$NON-NLS-1$
                            }
                            if (--loopGuard == 0) {
                                // Should never happen, but just in case
                                throw new IllegalStateException();
                            }
                            continue;
                        }
                    }
                }
                break;
            } while (true);
        }

        // putIfAbsent() succeeded and the new entry was added to the cache
        entry = (existingEntry != null) ? existingEntry : newEntry;

        LayerBuilder layerBuilder = null;

        // List of Future<IModule.ModuleReader> objects that will be used to read the module
        // data from
        List<ICacheKeyGenerator> moduleKeyGens = null;

        // Synchronize on the LayerBuild object for the build.  This will prevent multiple
        // threads from building the same output.  If more than one thread requests the same
        // output (same cache key), then the first one to grab the sync object will win and
        // the rest will wait for the first thread to finish building and then just return
        // the output from the first thread when they wake.
        synchronized (entry) {

            // Check to see if data is available one more time in case a different thread finished
            // building the output while we were blocked on the sync object.
            if (!ignoreCached && key != null && (result = entry.tryGetInputStream(request)) != null) {
                if (cacheInfoReport != null) {
                    cacheInfoReport.add("hit_2"); //$NON-NLS-1$
                }
                setResponseHeaders(request, response, entry.getSize());
                if (log.isLoggable(Level.FINEST)) {
                    log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                            "key:" + key + //$NON-NLS-1$
                            "\n" + entry.toString()); //$NON-NLS-1$
                }
                if (_isReportCacheInfo) {
                    request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
                }
                return result;
            }

            boolean isGzip = RequestUtil.isGzipEncoding(request);
            ByteArrayOutputStream bos = new ByteArrayOutputStream();

            // See if we already have a cached response that uses a different gzip
            // encoding option.  If we do, then just zip (or unzip) the cached
            // response
            CacheEntry otherEntry = null;
            if (key != null) {
                StringBuffer sb = new StringBuffer();
                Matcher m = GZIPFLAG_KEY_PATTERN.matcher(key);
                m.find();
                m.appendReplacement(sb,
                        new StringBuffer(s_layerCacheKeyGenerators.get(0).toString()).append(":") //$NON-NLS-1$
                                .append("1".equals(m.group(1)) ? "0" : "1") //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                                .append(":").toString() //$NON-NLS-1$
                ).appendTail(sb);
                otherEntry = _layerBuilds.get(sb.toString());
            }
            if (otherEntry != null) {
                if (isGzip) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("zip_unzipped"); //$NON-NLS-1$
                    }
                    // We need gzipped and the cached entry is unzipped
                    // Create the compression stream for the output
                    VariableGZIPOutputStream compress = new VariableGZIPOutputStream(bos, 10240); // is 10k too big?
                    compress.setLevel(Deflater.BEST_COMPRESSION);
                    Writer writer = new OutputStreamWriter(compress, "UTF-8"); //$NON-NLS-1$

                    // Copy the data from the input stream to the output, compressing as we go.
                    CopyUtil.copy(otherEntry.getInputStream(request), writer);
                } else {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("unzip_zipped"); //$NON-NLS-1$
                    }
                    // We need unzipped and the cached entry is zipped.  Just unzip it
                    CopyUtil.copy(new GZIPInputStream(otherEntry.getInputStream(request)), bos);
                }
                // Set the buildReader to the LayerBuild and release the lock by exiting the sync block
                entry.setBytes(bos.toByteArray());
                if (!ignoreCached) {
                    _layerBuilds.replace(key, entry, entry); // updates entry weight in map
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_weights_1"); //$NON-NLS-1$
                    }
                    entry.persist(mgr);
                }
            } else {
                moduleKeyGens = new LinkedList<ICacheKeyGenerator>();

                ModuleList moduleList = getModules(request);

                // Remove the module list from the request to safe-guard it now that we don't
                // need it there anymore
                request.removeAttribute(MODULE_FILES_PROPNAME);

                // Create a BuildListReader from the list of Futures.  This reader will obtain a
                // ModuleReader from each of the Futures in the list and read data from each one in
                // succession until all the data has been read, blocking on each Future until the
                // reader becomes available.
                layerBuilder = new LayerBuilder(request, moduleKeyGens, moduleList);
                String layer = layerBuilder.build();

                if (isGzip) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("zip"); //$NON-NLS-1$
                    }
                    VariableGZIPOutputStream compress = new VariableGZIPOutputStream(bos, 10240); // is 10k too big?
                    compress.setLevel(Deflater.BEST_COMPRESSION);
                    Writer writer = new OutputStreamWriter(compress, "UTF-8"); //$NON-NLS-1$

                    // Copy the data from the input stream to the output, compressing as we go.
                    CopyUtil.copy(new StringReader(layer), writer);
                    // Set the buildReader to the LayerBuild and release the lock by exiting the sync block
                    entry.setBytes(bos.toByteArray());
                } else {
                    entry.setBytes(layer.getBytes());
                }

                // entry will be persisted below after we determine if cache key
                // generator needs to be updated
            }
        }

        // if any of the readers included an error response, then don't cache the layer.
        if (layerBuilder != null && layerBuilder.hasErrors()) {
            request.setAttribute(NOCACHE_RESPONSE_REQATTRNAME, Boolean.TRUE);
            if (cacheInfoReport != null) {
                cacheInfoReport.add(key == null ? "error_noaction" : "error_remove"); //$NON-NLS-1$ //$NON-NLS-2$
            }
            if (key != null) {
                _layerBuilds.remove(key, entry);
            }
        } else if (layerBuilder != null) {
            if (!ignoreCached) {
                // See if we need to create or update the cache key generators
                Map<String, ICacheKeyGenerator> newKeyGens = new HashMap<String, ICacheKeyGenerator>();
                Set<String> requiredModuleListDeps = getModules(request).getDependentFeatures();
                addCacheKeyGenerators(newKeyGens, s_layerCacheKeyGenerators);
                addCacheKeyGenerators(newKeyGens, aggr.getTransport().getCacheKeyGenerators());
                addCacheKeyGenerators(newKeyGens, Arrays.asList(new ICacheKeyGenerator[] {
                        new FeatureSetCacheKeyGenerator(requiredModuleListDeps, false) }));
                addCacheKeyGenerators(newKeyGens, moduleKeyGens);

                boolean cacheKeyGeneratorsUpdated = false;
                if (!newKeyGens.equals(cacheKeyGenerators)) {
                    // If we don't yet have a cache key for this layer, then get one
                    // from the cache key generators, and then update the cache key for this
                    // cache entry.

                    synchronized (this) {
                        if (_cacheKeyGenerators != null) {
                            addCacheKeyGenerators(newKeyGens, _cacheKeyGenerators.values());
                        }
                        _cacheKeyGenerators = Collections.unmodifiableMap(newKeyGens);
                    }
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_keygen"); //$NON-NLS-1$
                    }
                    cacheKeyGeneratorsUpdated = true;
                }
                final String originalKey = key;
                if (key == null || cacheKeyGeneratorsUpdated) {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_key"); //$NON-NLS-1$
                    }
                    key = generateCacheKey(request, newKeyGens);
                }
                if (originalKey == null || !originalKey.equals(key)) {
                    /*
                     * The cache key has changed from what was originally used to put the
                     * un-built entry into the cache.  Add the LayerBuild to the cache
                     * using the new key.
                     */
                    if (log.isLoggable(Level.FINE)) {
                        log.fine("Key changed!  Adding layer to cache with key: " + key); //$NON-NLS-1$
                    }
                    final CacheEntry originalEntry = entry;
                    CacheEntry updateEntry = (originalKey == null) ? entry : new CacheEntry(entry);
                    CacheEntry previousEntry = _layerBuilds.putIfAbsent(key, updateEntry,
                            options.isDevelopmentMode());
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add(previousEntry == null ? "update_add" : "update_hit"); //$NON-NLS-1$ //$NON-NLS-2$
                    }
                    // Write the file to disk only if the LayerBuild was successfully added to the cache
                    if (previousEntry == null) {
                        // Updated entry was added to the cache.
                        entry = updateEntry;
                        entry.persist(mgr);
                    }
                    // If the key changed, then remove the entry under the old key.  Use a
                    // delay to give other threads a chance to start using the new cache
                    // key generator.  No need to update entry weight in map
                    if (originalKey != null) {
                        aggr.getExecutors().getScheduledExecutor().schedule(new Runnable() {
                            public void run() {
                                _layerBuilds.remove(originalKey, originalEntry);
                            }
                        }, LAYERBUILD_REMOVE_DELAY_SECONDS, TimeUnit.SECONDS);
                    }
                } else {
                    if (cacheInfoReport != null) {
                        cacheInfoReport.add("update_weights_2"); //$NON-NLS-1$
                    }
                    _layerBuilds.replace(key, entry, entry); // updates entry weight in map
                    entry.persist(mgr);
                }
            }
        }
        result = entry.getInputStream(request);
        setResponseHeaders(request, response, entry.getSize());

        // return the input stream to the LayerBuild
        if (log.isLoggable(Level.FINEST)) {
            log.finest(cacheInfoReport.toString() + "\n" + //$NON-NLS-1$
                    "key:" + key + //$NON-NLS-1$
                    "\n" + entry.toString()); //$NON-NLS-1$
        }
        if (_isReportCacheInfo) {
            request.setAttribute(LAYERBUILDCACHEKEY_PROPNAME, key);
        }
        return result;
    } catch (IOException e) {
        _layerBuilds.remove(key, entry);
        throw e;
    } catch (RuntimeException e) {
        _layerBuilds.remove(key, entry);
        throw e;
    } finally {
        if (_layerBuilds.isLayerEvicted()) {
            _layerBuilds.removeLayerFromCache(this);
        }
    }
}

From source file:com.sun.grizzly.http.jk.common.JniHandler.java

protected void setNativeEndpoint(MsgContext msgCtx) {
    long xEnv = apr.getJkEnv();
    msgCtx.setJniEnv(xEnv);

    long epP = apr.createJkHandler(xEnv, "endpoint");
    LoggerUtils.getLogger().log(Level.FINEST, "create ep " + epP);
    if (epP == 0) {
        return;
    }
    apr.jkInit(xEnv, epP);
    msgCtx.setJniContext(epP);

}

From source file:ffx.potential.parameters.TorsionTorsionType.java

/**
 * <p>
 * Constructor for TorsionTorsionType.</p>
 *
 * @param atomClasses an array of int.
 * @param gridPoints an array of int.
 * @param torsion1 an array of double.
 * @param torsion2 an array of double.
 * @param energy an array of double.
 */
public TorsionTorsionType(int atomClasses[], int gridPoints[], double torsion1[], double torsion2[],
        double energy[]) {
    super(ForceField.ForceFieldType.TORTORS, sortKey(atomClasses));
    this.atomClasses = atomClasses;
    nx = gridPoints[0];
    ny = gridPoints[1];
    if (nx != ny) {
        logger.severe("Untested TORTOR parameters: nx != ny: " + nx + ", " + ny);
    }
    this.torsion1 = torsion1;
    this.torsion2 = torsion2;
    this.energy = energy;
    this.gridPoints = gridPoints;
    tx = new double[nx];
    ty = new double[ny];
    dx = new double[nx * ny];
    dy = new double[nx * ny];
    dxy = new double[nx * ny];
    Arrays.sort(torsion1);
    Arrays.sort(torsion2);
    tx[0] = torsion1[0];
    ty[0] = torsion2[0];
    int j1 = 1;
    int j2 = 1;
    for (int i = 1; i < nx; i++) {
        while (torsion1[j1] == tx[i - 1]) {
            j1++;
        }
        while (torsion2[j2] == ty[i - 1]) {
            j2++;
        }
        tx[i] = torsion1[j1];
        ty[i] = torsion2[j2];
    }

    /**
     * Check for cyclic energy.
     */
    boolean isCyclic = true;
    double eps = 0.0001;
    if (abs(tx[0] - tx[nx - 1]) - 360.0 > eps) {
        isCyclic = false;
        if (logger.isLoggable(Level.FINEST)) {
            logger.finest(" tortor is aperiodic: " + tx[0] + ", " + tx[nx - 1]);
        }
    }
    if (isCyclic) {
        for (int i = 0; i < ny; i++) {
            int k = i * nx;
            if (abs(energy[k] - energy[k + nx - 1]) > eps) {
                isCyclic = false;
                if (logger.isLoggable(Level.FINEST)) {
                    logger.finest(" tortor is apreriodic: " + k + ", " + (k + nx - 1) + ": "
                            + abs(energy[k] - energy[k + nx - 1]));
                }
                break;
            }
        }
    }
    if (isCyclic) {
        int k = (ny - 1) * nx;
        for (int i = 0; i < nx; i++) {
            if (abs(energy[i] - energy[i + k]) > eps) {
                if (logger.isLoggable(Level.FINEST)) {
                    logger.fine(" tortor is aperiodic: " + i + ", " + i + k + ": "
                            + abs(energy[i] - energy[i + k]));
                }
                isCyclic = false;
                break;
            }
        }
    }
    cyclic = isCyclic;
    double tmp1[] = new double[nx];
    double tmp2[] = new double[nx];
    double tmp3[] = new double[nx];
    double tmp4[] = new double[nx];
    double tmp5[] = new double[nx];
    double tmp6[] = new double[nx];
    double tmp7[] = new double[nx];
    double bs[] = new double[nx];
    double cs[] = new double[nx];
    double ds[] = new double[nx];

    /**
     * Spline fit the derivatives about the first torsion.
     */
    for (int i = 0; i < nx; i++) {
        tmp1[i] = tx[i];
    }
    int m = 0;
    for (int j = 0; j < ny; j++) {
        for (int k = 0; k < nx; k++) {
            tmp2[k] = energy[m + k];
        }
        if (cyclic) {
            cspline(nx - 1, tmp1, tmp2, bs, cs, ds, tmp3, tmp4, tmp5, tmp6, tmp7);
        } else {
            nspline(nx - 1, tmp1, tmp2, 0.0e0, 0.0e0, bs, cs, tmp3, tmp4, tmp5, tmp6, tmp7);
        }

        for (int k = 0; k < nx; k++) {
            dx[m + k] = bs[k];
        }
        m = m + nx;
    }

    /**
     * Spline fit the derivatives about the second torsion.
     */
    for (int i = 0; i < ny; i++) {
        tmp1[i] = ty[i];
    }
    m = 0;
    for (int j = 0; j < nx; j++) {
        for (int k = 0; k < ny; k++) {
            tmp2[k] = energy[m + k * nx];
        }
        if (cyclic) {
            cspline(ny - 1, tmp1, tmp2, bs, cs, ds, tmp3, tmp4, tmp5, tmp6, tmp7);
        } else {
            nspline(ny - 1, tmp1, tmp2, 0.0e0, 0.0e0, bs, cs, tmp3, tmp4, tmp5, tmp6, tmp7);
        }
        for (int k = 0; k < ny; k++) {
            dy[m + k * nx] = bs[k];
        }
        m = m + 1;
    }

    /**
     * Spline fit the cross derivatives about both torsions.
     */
    m = 0;
    for (int j = 0; j < nx; j++) {
        for (int k = 0; k < ny; k++) {
            tmp2[k] = dx[m + k * nx];
        }
        if (cyclic) {
            cspline(ny - 1, tmp1, tmp2, bs, cs, ds, tmp3, tmp4, tmp5, tmp6, tmp7);
        } else {
            nspline(ny - 1, tmp1, tmp2, 0.0e0, 0.0e0, bs, cs, tmp3, tmp4, tmp5, tmp6, tmp7);
        }
        for (int k = 0; k < ny; k++) {
            dxy[m + k * nx] = bs[k];
        }
        m = m + 1;
    }
}

From source file:fr.logfiletoes.ElasticSearchTest.java

@Test
public void fail2ban() throws IOException, InterruptedException {
    File data = Files.createTempDirectory("it_es_data-").toFile();
    Settings settings = ImmutableSettings.settingsBuilder().put("path.data", data.toString())
            .put("cluster.name", "IT-0002").build();
    Node node = NodeBuilder.nodeBuilder().local(true).settings(settings).build();
    Client client = node.client();
    node.start();
    Config config = new Config("./src/test/resources/fail2ban.json");
    List<Unit> units = config.getUnits();
    assertEquals(1, units.size());
    units.get(0).start();

    // Wait for the log entries to be stored
    Thread.sleep(3000);

    // Search log
    SearchResponse response = client.prepareSearch("system").setSearchType(SearchType.DEFAULT)
            .setQuery(QueryBuilders.matchQuery("message", "58.218.204.248")).setSize(1000)
            .addSort("@timestamp", SortOrder.ASC).execute().actionGet();
    if (LOG.isLoggable(Level.FINEST)) {
        for (SearchHit hit : response.getHits().getHits()) {
            LOG.finest("-----------------");
            hit.getSource().forEach((key, value) -> {
                LOG.log(Level.FINEST, "{0} = {1}", new Object[] { key, value });
            });
        }
    }

    // Get information need to test
    assertEquals(6, response.getHits().getHits().length);
    assertEquals("Found 58.218.204.248", response.getHits().getHits()[0].getSource().get("message").toString());
    assertEquals("Ban 58.218.204.248", response.getHits().getHits()[5].getSource().get("message").toString());

    // Wait for outstanding requests to finish
    Thread.sleep(10000);

    // Close tailer
    units.get(0).stop();

    // Close ElasticSearch
    node.close();

    // Clean data directory
    FileUtils.forceDelete(data);
}

From source file:org.apache.reef.io.network.NetworkConnectionServiceTest.java

/**
 * NetworkService messaging rate benchmark.
 */
@Test
public void testMessagingNetworkConnServiceRateDisjoint() throws Exception {

    Assume.assumeFalse("Use log level INFO to run benchmarking", LOG.isLoggable(Level.FINEST));

    LOG.log(Level.FINEST, name.getMethodName());

    final BlockingQueue<Object> barrier = new LinkedBlockingQueue<>();

    final int numThreads = 4;
    final int size = 2000;
    final int numMessages = 300000 / (Math.max(1, size / 512));
    final int totalNumMessages = numMessages * numThreads;
    final String message = StringUtils.repeat('1', size);

    final ExecutorService e = Executors.newCachedThreadPool();
    for (int t = 0; t < numThreads; t++) {
        final int tt = t;

        e.submit(new Runnable() {
            public void run() {
                try (final NetworkMessagingTestService messagingTestService = new NetworkMessagingTestService(
                        localAddress)) {
                    final Monitor monitor = new Monitor();
                    final Codec<String> codec = new StringCodec();

                    messagingTestService.registerTestConnectionFactory(groupCommClientId, numMessages, monitor,
                            codec);
                    try (final Connection<String> conn = messagingTestService
                            .getConnectionFromSenderToReceiver(groupCommClientId)) {
                        try {
                            conn.open();
                            for (int count = 0; count < numMessages; ++count) {
                                // send messages to the receiver.
                                conn.write(message);
                            }
                            monitor.mwait();
                        } catch (final NetworkException e) {
                            e.printStackTrace();
                            throw new RuntimeException(e);
                        }
                    }
                } catch (final Exception e) {
                    throw new RuntimeException(e);
                }
            }
        });
    }

    // start and time
    final long start = System.currentTimeMillis();
    final Object ignore = new Object();
    for (int i = 0; i < numThreads; i++) {
        barrier.add(ignore);
    }
    e.shutdown();
    e.awaitTermination(100, TimeUnit.SECONDS);
    final long end = System.currentTimeMillis();
    final double runtime = ((double) end - start) / 1000;
    LOG.log(Level.INFO, "size: " + size + "; messages/s: " + totalNumMessages / runtime
            + " bandwidth(bytes/s): " + ((double) totalNumMessages * 2 * size) / runtime); // x2 for unicode chars
}

From source file:com.sencko.basketball.stats.advanced.FIBAJsonParser.java

private static void addToCache(String cacheName, Game game)
        throws FileNotFoundException, UnsupportedEncodingException, IOException {
    logger.log(Level.FINEST, "Saving file {0} to cache", cacheName);
    File file = new File("archive.zip");
    File file1 = null;
    if (file.exists()) {
        //copy to archive1, return
        file1 = new File("archive1.zip");
        if (file1.exists()) {
            if (!file1.delete()) {
                logger.log(Level.WARNING, "Unable to delete file {0}", file1.getCanonicalPath());
                return;
            }
        }
        if (!file.renameTo(file1)) {
            logger.log(Level.WARNING, "Unable to rename file {0} to {1}",
                    new Object[] { file.getCanonicalPath(), file1.getCanonicalPath() });
            // unable to move to archive1 and whole operation fails!!!
            return;
        }
    }

    try (ZipOutputStream out = new ZipOutputStream(new FileOutputStream(file))) {
        out.setLevel(9);
        // Name the file inside the zip file.
        out.putNextEntry(new ZipEntry(cacheName));
        OutputStreamWriter outputStreamWriter = new OutputStreamWriter(out, "UTF-8");
        JsonWriter jsonWriter = new JsonWriter(outputStreamWriter);
        jsonWriter.setIndent("  ");
        builder.create().toJson(game, Game.class, jsonWriter);
        jsonWriter.flush();

        if (file1 != null) {
            try (ZipFile zipFile = new ZipFile(file1)) {
                Enumeration<? extends ZipEntry> files = zipFile.entries();
                while (files.hasMoreElements()) {
                    ZipEntry entry = files.nextElement();
                    try (InputStream in = zipFile.getInputStream(entry)) {
                        out.putNextEntry(new ZipEntry(entry.getName()));

                        IOUtils.copy(in, out);
                    }
                }
            }
            file1.delete();

        }
    }
}