Example usage for java.util.concurrent.atomic AtomicLong AtomicLong

Introduction

This page collects example usages of the java.util.concurrent.atomic.AtomicLong(long initialValue) constructor.

Prototype

public AtomicLong(long initialValue) 

Document

Creates a new AtomicLong with the given initial value.
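
Before the full usage examples, here is a minimal self-contained sketch of the constructor; it is not taken from any of the source files below, and the class name and values are illustrative only:

import java.util.concurrent.atomic.AtomicLong;

public class AtomicLongDemo {
    public static void main(String[] args) {
        // Start the counter at a known value instead of the default 0.
        AtomicLong counter = new AtomicLong(100L);

        counter.getAndAdd(5L);      // atomic read-modify-write: 100 -> 105
        counter.incrementAndGet();  // 105 -> 106

        System.out.println(counter.get()); // prints 106
    }
}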

Usage

From source file:okuyama.imdst.util.KeyManagerValueMap.java
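In this method, a per-prefix data-size counter is created with new AtomicLong(0L) the first time a key prefix is seen, so subsequent getAndAdd calls can adjust the total atomically.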

private void totalDataSizeCalc(Object key, Object value) {
    if (!ImdstDefine.calcSizeFlg)
        return;

    long addSize = 0L;

    if (value != null)
        addSize = new Double((((String) key).length() + ((String) value).length()) * 0.8).longValue();

    if (addSize != 0L)
        addSize = addSize + 20;

    String unique = null;
    String keyStr = (String) key;
    int beforeSize = 0;
    AtomicLong size = null;
    int nowValLen = 0;

    if (keyStr.indexOf("#") == 0) {

        unique = keyStr.substring(0, 6);
    } else {
        unique = "all";
    }

    if (mapValueInSize) {
        String val = (String) super.get(key);

        if (val != null) {
            nowValLen = new Double(
                    (((String) key).length() + new Integer(((String[]) val.split(":"))[1]).intValue()) * 0.8)
                            .intValue()
                    + 20;
        }
    } else {

        Object val = this.get(key);

        if (val != null) {
            nowValLen = new Double((((String) key).length() + ((String) val).length()) * 0.8).intValue() + 20;
        }
    }
    if (nowValLen != 0) {
        beforeSize = nowValLen * -1;
    }

    if (!dataSizeMap.containsKey(unique)) {
        size = new AtomicLong(0L);
        dataSizeMap.put(unique, size);
    } else {
        size = (AtomicLong) dataSizeMap.get(unique);
    }

    // Replace the old size contribution with the new one: subtract the previous size, add the new size.
    size.getAndAdd(beforeSize);
    size.getAndAdd(addSize);
}

From source file:org.apache.hadoop.hbase.regionserver.wal.TestHLog.java
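Here new AtomicLong(1) seeds the region's WAL sequence id, which log.append advances as the test writes, flushes, and reads back an edit.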

/**
 * Tests that we can write out an edit, close, and then read it back in again.
 * @throws IOException
 */
@Test
public void testEditAdd() throws IOException {
    final int COL_COUNT = 10;
    final TableName tableName = TableName.valueOf("tablename");
    final byte[] row = Bytes.toBytes("row");
    HLog.Reader reader = null;
    HLog log = null;
    try {
        log = HLogFactory.createHLog(fs, hbaseDir, getName(), conf);
        final AtomicLong sequenceId = new AtomicLong(1);

        // Write columns named 1, 2, 3, etc. and then values of single byte
        // 1, 2, 3...
        long timestamp = System.currentTimeMillis();
        WALEdit cols = new WALEdit();
        for (int i = 0; i < COL_COUNT; i++) {
            cols.add(new KeyValue(row, Bytes.toBytes("column"), Bytes.toBytes(Integer.toString(i)), timestamp,
                    new byte[] { (byte) (i + '0') }));
        }
        HRegionInfo info = new HRegionInfo(tableName, row, Bytes.toBytes(Bytes.toString(row) + "1"), false);
        HTableDescriptor htd = new HTableDescriptor();
        htd.addFamily(new HColumnDescriptor("column"));

        log.append(info, tableName, cols, System.currentTimeMillis(), htd, sequenceId);
        log.startCacheFlush(info.getEncodedNameAsBytes());
        log.completeCacheFlush(info.getEncodedNameAsBytes());
        log.close();
        Path filename = ((FSHLog) log).computeFilename();
        log = null;
        // Now open a reader on the log and assert append worked.
        reader = HLogFactory.createReader(fs, filename, conf);
        // Above we added all columns on a single row so we only read one
        // entry below... that's why we have '1'.
        for (int i = 0; i < 1; i++) {
            HLog.Entry entry = reader.next(null);
            if (entry == null)
                break;
            HLogKey key = entry.getKey();
            WALEdit val = entry.getEdit();
            assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
            assertTrue(tableName.equals(key.getTablename()));
            KeyValue kv = val.getKeyValues().get(0);
            assertTrue(Bytes.equals(row, kv.getRow()));
            assertEquals((byte) (i + '0'), kv.getValue()[0]);
            System.out.println(key + " " + val);
        }
    } finally {
        if (log != null) {
            log.closeAndDelete();
        }
        if (reader != null) {
            reader.close();
        }
    }
}

From source file:okuyama.imdst.util.KeyManagerValueMap.java
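A zero-initialized AtomicLong serves as the default return value when no size has yet been recorded for the requested unique key.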

public long getDataUseSize(String unique) {

    AtomicLong size = new AtomicLong(0L);

    if (unique == null)
        unique = "all";

    if (dataSizeMap.containsKey(unique)) {

        size = (AtomicLong) dataSizeMap.get(unique);
    }

    return size.longValue();
}

From source file:org.apache.hadoop.hbase.wal.TestWALFactory.java
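As in the previous HBase test, new AtomicLong(1) supplies the starting sequence id shared across all appends to the WAL.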

/**
 * Test that we can visit entries before they are appended
 * @throws Exception
 */
@Test
public void testVisitors() throws Exception {
    final int COL_COUNT = 10;
    final TableName tableName = TableName.valueOf("tablename");
    final byte[] row = Bytes.toBytes("row");
    final DumbWALActionsListener visitor = new DumbWALActionsListener();
    final AtomicLong sequenceId = new AtomicLong(1);
    long timestamp = System.currentTimeMillis();
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor("column"));

    HRegionInfo hri = new HRegionInfo(tableName, HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
    final WAL log = wals.getWAL(hri.getEncodedNameAsBytes());
    log.registerWALActionsListener(visitor);
    for (int i = 0; i < COL_COUNT; i++) {
        WALEdit cols = new WALEdit();
        cols.add(new KeyValue(row, Bytes.toBytes("column"), Bytes.toBytes(Integer.toString(i)), timestamp,
                new byte[] { (byte) (i + '0') }));
        log.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, System.currentTimeMillis()),
                cols, sequenceId, true, null);
    }
    log.sync();
    assertEquals(COL_COUNT, visitor.increments);
    log.unregisterWALActionsListener(visitor);
    WALEdit cols = new WALEdit();
    cols.add(new KeyValue(row, Bytes.toBytes("column"), Bytes.toBytes(Integer.toString(11)), timestamp,
            new byte[] { (byte) (11 + '0') }));
    log.append(htd, hri, new WALKey(hri.getEncodedNameAsBytes(), tableName, System.currentTimeMillis()), cols,
            sequenceId, true, null);
    log.sync();
    assertEquals(COL_COUNT, visitor.increments);
}

From source file:org.apache.druid.java.util.common.CompressionUtilsTest.java
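The test initializes flushes to zero and calls getAndIncrement inside an overridden flush() so that the very first flush throws, exercising gunzip's error handling.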

@Test(expected = IOException.class)
public void testStreamErrorGunzip() throws Exception {
    final File tmpDir = temporaryFolder.newFolder("testGoodGzipByteSource");
    final File gzFile = new File(tmpDir, testFile.getName() + ".gz");
    Assert.assertFalse(gzFile.exists());
    CompressionUtils.gzip(Files.asByteSource(testFile), Files.asByteSink(gzFile), Predicates.alwaysTrue());
    Assert.assertTrue(gzFile.exists());
    try (final InputStream inputStream = CompressionUtils.decompress(new FileInputStream(gzFile), "file.gz")) {
        assertGoodDataStream(inputStream);
    }
    if (testFile.exists() && !testFile.delete()) {
        throw new RE("Unable to delete file [%s]", testFile.getAbsolutePath());
    }
    Assert.assertFalse(testFile.exists());
    final AtomicLong flushes = new AtomicLong(0L);
    CompressionUtils.gunzip(new FileInputStream(gzFile), new FilterOutputStream(new FileOutputStream(testFile) {
        @Override
        public void flush() throws IOException {
            if (flushes.getAndIncrement() > 0) {
                super.flush();
            } else {
                throw new IOException("Test exception");
            }
        }
    }));
}

From source file:org.killbill.queue.TestDBBackedQueue.java
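A single AtomicLong initialized to zero is shared by three reader threads to tally how many bus events were consumed.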

@Test(groups = "slow")
public void testMultipleWritersMultipleReaders() throws InterruptedException {

    final PersistentBusConfig config = createConfig(7, 100, false, true);
    queue = new DBBackedQueue<BusEventModelDao>(clock, sqlDao, config, "multipleReaderMultipleWriter-bus_event",
            metricRegistry, databaseTransactionNotificationApi);
    queue.initialize();

    final Thread[] writers = new Thread[2];
    final Thread[] readers = new Thread[3];

    writers[0] = new Thread(new WriterRunnable(0, 1000, queue));
    writers[1] = new Thread(new WriterRunnable(1, 1000, queue));

    final AtomicLong consumed = new AtomicLong(0);
    readers[0] = new Thread(new ReaderRunnable(0, consumed, 2000, queue));
    readers[1] = new Thread(new ReaderRunnable(1, consumed, 2000, queue));
    readers[2] = new Thread(new ReaderRunnable(2, consumed, 2000, queue));

    writers[0].start();
    writers[1].start();

    while (queue.isQueueOpenForWrite()) {
        try {
            Thread.sleep(10);
        } catch (InterruptedException e) {
        }
    }
    readers[0].start();
    readers[1].start();
    readers[2].start();

    try {
        writers[0].join();
        writers[1].join();
        readers[0].join();
        readers[1].join();
        readers[2].join();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    final List<BusEventModelDao> ready = sqlDao.getReadyEntries(clock.getUTCNow().toDate(), 1000, OWNER,
            "bus_events");
    assertEquals(ready.size(), 0);

    log.info("Got inflightProcessed = " + queue.getTotalInflightFetched() + "/1000, inflightWritten = "
            + queue.getTotalInflightInsert() + "/1000");
    assertEquals(queue.getTotalInsert(), 2000);
}

From source file:org.apereo.portal.io.xml.JaxbPortalDataHandlerService.java
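Each import task gets an AtomicLong initialized to -1 as a "not started" sentinel; the task overwrites it with System.nanoTime() deltas to record the import duration.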

@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    //Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    //Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    //Setup reporting file
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    //Convert directory to URI String to provide better logging output
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        //Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes,
                options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        //See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        //Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        //Import the data files
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                //Check for completed futures on every iteration, needed to fail as fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter,
                        logDirectory, false);
                failedFutures.addAll(newFailed);

                final AtomicLong importTime = new AtomicLong(-1);

                //Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                //Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                //Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            //Wait for all of the imports of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory,
                    true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(
                        failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n"
                                + "\tPer entity exception logs and a full report can be found in "
                                + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException(
                    "The following PortalDataKeys are not listed in the dataTypeImportOrder List: "
                            + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}

From source file:io.warp10.continuum.egress.EgressFetchHandler.java
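Deep inside this fetch handler, new AtomicLong(0L) holds a running count carried between successive dump calls as data is fetched in batches.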

@Override
public void handle(String target, Request baseRequest, HttpServletRequest req, HttpServletResponse resp)
        throws IOException, ServletException {
    boolean fromArchive = false;
    boolean splitFetch = false;
    boolean writeTimestamp = false;

    if (Constants.API_ENDPOINT_FETCH.equals(target)) {
        baseRequest.setHandled(true);
        fromArchive = false;
    } else if (Constants.API_ENDPOINT_AFETCH.equals(target)) {
        baseRequest.setHandled(true);
        fromArchive = true;
    } else if (Constants.API_ENDPOINT_SFETCH.equals(target)) {
        baseRequest.setHandled(true);
        splitFetch = true;
    } else if (Constants.API_ENDPOINT_CHECK.equals(target)) {
        baseRequest.setHandled(true);
        resp.setStatus(HttpServletResponse.SC_OK);
        return;
    } else {
        return;
    }

    try {
        // Labels for Sensision
        Map<String, String> labels = new HashMap<String, String>();

        labels.put(SensisionConstants.SENSISION_LABEL_TYPE, target);

        //
        // Add CORS header
        //

        resp.setHeader("Access-Control-Allow-Origin", "*");

        String start = null;
        String stop = null;

        long now = Long.MIN_VALUE;
        long timespan = 0L;

        String nowParam = null;
        String timespanParam = null;
        String dedupParam = null;
        String showErrorsParam = null;

        if (splitFetch) {
            nowParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_NOW_HEADERX));
            timespanParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_TIMESPAN_HEADERX));
            showErrorsParam = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_SHOW_ERRORS_HEADERX));
        } else {
            start = req.getParameter(Constants.HTTP_PARAM_START);
            stop = req.getParameter(Constants.HTTP_PARAM_STOP);

            nowParam = req.getParameter(Constants.HTTP_PARAM_NOW);
            timespanParam = req.getParameter(Constants.HTTP_PARAM_TIMESPAN);
            dedupParam = req.getParameter(Constants.HTTP_PARAM_DEDUP);
            showErrorsParam = req.getParameter(Constants.HTTP_PARAM_SHOW_ERRORS);
        }

        String maxDecoderLenParam = req.getParameter(Constants.HTTP_PARAM_MAXSIZE);
        int maxDecoderLen = null != maxDecoderLenParam ? Integer.parseInt(maxDecoderLenParam)
                : Constants.DEFAULT_PACKED_MAXSIZE;

        String suffix = req.getParameter(Constants.HTTP_PARAM_SUFFIX);
        if (null == suffix) {
            suffix = Constants.DEFAULT_PACKED_CLASS_SUFFIX;
        }

        boolean unpack = null != req.getParameter(Constants.HTTP_PARAM_UNPACK);

        long chunksize = Long.MAX_VALUE;

        if (null != req.getParameter(Constants.HTTP_PARAM_CHUNKSIZE)) {
            chunksize = Long.parseLong(req.getParameter(Constants.HTTP_PARAM_CHUNKSIZE));
        }

        if (chunksize <= 0) {
            throw new IOException("Invalid chunksize.");
        }

        boolean showErrors = null != showErrorsParam;
        boolean dedup = null != dedupParam && "true".equals(dedupParam);

        if (null != start && null != stop) {
            long tsstart = fmt.parseDateTime(start).getMillis() * Constants.TIME_UNITS_PER_MS;
            long tsstop = fmt.parseDateTime(stop).getMillis() * Constants.TIME_UNITS_PER_MS;

            if (tsstart < tsstop) {
                now = tsstop;
                timespan = tsstop - tsstart;
            } else {
                now = tsstart;
                timespan = tsstart - tsstop;
            }
        } else if (null != nowParam && null != timespanParam) {
            if ("now".equals(nowParam)) {
                now = TimeSource.getTime();
            } else {
                try {
                    now = Long.parseLong(nowParam);
                } catch (Exception e) {
                    now = fmt.parseDateTime(nowParam).getMillis() * Constants.TIME_UNITS_PER_MS;
                }
            }

            timespan = Long.parseLong(timespanParam);
        }

        if (Long.MIN_VALUE == now) {
            resp.sendError(HttpServletResponse.SC_BAD_REQUEST,
                    "Missing now/timespan or start/stop parameters.");
            return;
        }

        String selector = splitFetch ? null : req.getParameter(Constants.HTTP_PARAM_SELECTOR);

        //
        // Extract token from header
        //

        String token = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_TOKENX));

        // If token was not found in header, extract it from the 'token' parameter
        if (null == token && !splitFetch) {
            token = req.getParameter(Constants.HTTP_PARAM_TOKEN);
        }

        String fetchSig = req.getHeader(Constants.getHeader(Configuration.HTTP_HEADER_FETCH_SIGNATURE));

        //
        // Check token signature if it was provided
        //

        boolean signed = false;

        if (splitFetch) {
            // Force showErrors
            showErrors = true;
            signed = true;
        }

        if (null != fetchSig) {
            if (null != fetchPSK) {
                String[] subelts = fetchSig.split(":");
                if (2 != subelts.length) {
                    throw new IOException("Invalid fetch signature.");
                }
                long nowts = System.currentTimeMillis();
                long sigts = new BigInteger(subelts[0], 16).longValue();
                long sighash = new BigInteger(subelts[1], 16).longValue();

                if (nowts - sigts > 10000L) {
                    throw new IOException("Fetch signature has expired.");
                }

                // Recompute hash of ts:token

                String tstoken = Long.toString(sigts) + ":" + token;

                long checkedhash = SipHashInline.hash24(fetchPSK, tstoken.getBytes(Charsets.ISO_8859_1));

                if (checkedhash != sighash) {
                    throw new IOException("Corrupted fetch signature");
                }

                signed = true;
            } else {
                throw new IOException("Fetch PreSharedKey is not set.");
            }
        }

        ReadToken rtoken = null;

        String format = splitFetch ? "wrapper" : req.getParameter(Constants.HTTP_PARAM_FORMAT);

        if (!splitFetch) {
            try {
                rtoken = Tokens.extractReadToken(token);

                if (rtoken.getHooksSize() > 0) {
                    throw new IOException("Tokens with hooks cannot be used for fetching data.");
                }
            } catch (WarpScriptException ee) {
                throw new IOException(ee);
            }

            if (null == rtoken) {
                resp.sendError(HttpServletResponse.SC_FORBIDDEN, "Missing token.");
                return;
            }
        }

        boolean showAttr = "true".equals(req.getParameter(Constants.HTTP_PARAM_SHOWATTR));

        boolean sortMeta = "true".equals(req.getParameter(Constants.HTTP_PARAM_SORTMETA));

        //
        // Extract the class and labels selectors
        // The class selector and label selectors are supposed to have
        // values which use percent encoding, i.e. explicit percent encoding which
        // might have been re-encoded using percent encoding when passed as parameter
        //

        Set<Metadata> metadatas = new HashSet<Metadata>();
        List<Iterator<Metadata>> iterators = new ArrayList<Iterator<Metadata>>();

        if (!splitFetch) {

            if (null == selector) {
                throw new IOException("Missing '" + Constants.HTTP_PARAM_SELECTOR + "' parameter.");
            }

            String[] selectors = selector.split("\\s+");

            for (String sel : selectors) {
                Matcher m = SELECTOR_RE.matcher(sel);

                if (!m.matches()) {
                    resp.sendError(HttpServletResponse.SC_BAD_REQUEST);
                    return;
                }

                String classSelector = URLDecoder.decode(m.group(1), "UTF-8");
                String labelsSelection = m.group(2);

                Map<String, String> labelsSelectors;

                try {
                    labelsSelectors = GTSHelper.parseLabelsSelectors(labelsSelection);
                } catch (ParseException pe) {
                    throw new IOException(pe);
                }

                //
                // Force 'producer'/'owner'/'app' from token
                //

                labelsSelectors.remove(Constants.PRODUCER_LABEL);
                labelsSelectors.remove(Constants.OWNER_LABEL);
                labelsSelectors.remove(Constants.APPLICATION_LABEL);

                labelsSelectors.putAll(Tokens.labelSelectorsFromReadToken(rtoken));

                List<Metadata> metas = null;

                List<String> clsSels = new ArrayList<String>();
                List<Map<String, String>> lblsSels = new ArrayList<Map<String, String>>();

                clsSels.add(classSelector);
                lblsSels.add(labelsSelectors);

                try {
                    metas = directoryClient.find(clsSels, lblsSels);
                    metadatas.addAll(metas);
                } catch (Exception e) {
                    //
                    // If metadatas is not empty, create an iterator for it, then clear it
                    //
                    if (!metadatas.isEmpty()) {
                        iterators.add(metadatas.iterator());
                        metadatas.clear();
                    }
                    iterators.add(directoryClient.iterator(clsSels, lblsSels));
                }
            }
        } else {
            //
            // Add an iterator which reads splits from the request body
            //

            boolean gzipped = false;

            if (null != req.getHeader("Content-Type")
                    && "application/gzip".equals(req.getHeader("Content-Type"))) {
                gzipped = true;
            }

            BufferedReader br = null;

            if (gzipped) {
                GZIPInputStream is = new GZIPInputStream(req.getInputStream());
                br = new BufferedReader(new InputStreamReader(is));
            } else {
                br = req.getReader();
            }

            final BufferedReader fbr = br;

            MetadataIterator iterator = new MetadataIterator() {

                private List<Metadata> metadatas = new ArrayList<Metadata>();

                private boolean done = false;

                private String lasttoken = "";

                @Override
                public void close() throws Exception {
                    fbr.close();
                }

                @Override
                public Metadata next() {
                    if (!metadatas.isEmpty()) {
                        Metadata meta = metadatas.get(metadatas.size() - 1);
                        metadatas.remove(metadatas.size() - 1);
                        return meta;
                    } else {
                        if (hasNext()) {
                            return next();
                        } else {
                            throw new NoSuchElementException();
                        }
                    }
                }

                @Override
                public boolean hasNext() {
                    if (!metadatas.isEmpty()) {
                        return true;
                    }

                    if (done) {
                        return false;
                    }

                    String line = null;

                    try {
                        line = fbr.readLine();
                    } catch (IOException ioe) {
                        throw new RuntimeException(ioe);
                    }

                    if (null == line) {
                        done = true;
                        return false;
                    }

                    //
                    // Decode/Unwrap/Deserialize the split
                    //

                    byte[] data = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));
                    if (null != fetchAES) {
                        data = CryptoUtils.unwrap(fetchAES, data);
                    }

                    if (null == data) {
                        throw new RuntimeException("Invalid wrapped content.");
                    }

                    TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());

                    GTSSplit split = new GTSSplit();

                    try {
                        deserializer.deserialize(split, data);
                    } catch (TException te) {
                        throw new RuntimeException(te);
                    }

                    //
                    // Check the expiry
                    //

                    long instant = System.currentTimeMillis();

                    if (instant - split.getTimestamp() > maxSplitAge || instant > split.getExpiry()) {
                        throw new RuntimeException("Split has expired.");
                    }

                    this.metadatas.addAll(split.getMetadatas());

                    // We assume there was at least one metadata instance in the split!!!
                    return true;
                }
            };

            iterators.add(iterator);
        }

        List<Metadata> metas = new ArrayList<Metadata>();
        metas.addAll(metadatas);

        if (!metas.isEmpty()) {
            iterators.add(metas.iterator());
        }

        //
        // Loop over the iterators, storing the read metadata to a temporary file encrypted on disk
        // Data is encrypted using a onetime pad
        //

        final byte[] onetimepad = new byte[(int) Math.min(65537, System.currentTimeMillis() % 100000)];
        new Random().nextBytes(onetimepad);

        final File cache = File.createTempFile(
                Long.toHexString(System.currentTimeMillis()) + "-" + Long.toHexString(System.nanoTime()),
                ".dircache");
        cache.deleteOnExit();

        FileWriter writer = new FileWriter(cache);

        TSerializer serializer = new TSerializer(new TCompactProtocol.Factory());

        int padidx = 0;

        for (Iterator<Metadata> itermeta : iterators) {
            try {
                while (itermeta.hasNext()) {
                    Metadata metadata = itermeta.next();

                    try {
                        byte[] bytes = serializer.serialize(metadata);
                        // Apply onetimepad
                        for (int i = 0; i < bytes.length; i++) {
                            bytes[i] = (byte) (bytes[i] ^ onetimepad[padidx++]);
                            if (padidx >= onetimepad.length) {
                                padidx = 0;
                            }
                        }
                        OrderPreservingBase64.encodeToWriter(bytes, writer);
                        writer.write('\n');
                    } catch (TException te) {
                    }
                }

                if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                    try {
                        ((MetadataIterator) itermeta).close();
                    } catch (Exception e) {
                    }
                }
            } catch (Throwable t) {
                throw t;
            } finally {
                if (itermeta instanceof MetadataIterator) {
                    try {
                        ((MetadataIterator) itermeta).close();
                    } catch (Exception e) {
                    }
                }
            }
        }

        writer.close();

        //
        // Create an iterator based on the cache
        //

        MetadataIterator cacheiterator = new MetadataIterator() {

            BufferedReader reader = new BufferedReader(new FileReader(cache));

            private Metadata current = null;
            private boolean done = false;

            private TDeserializer deserializer = new TDeserializer(new TCompactProtocol.Factory());

            int padidx = 0;

            @Override
            public boolean hasNext() {
                if (done) {
                    return false;
                }

                if (null != current) {
                    return true;
                }

                try {
                    String line = reader.readLine();
                    if (null == line) {
                        done = true;
                        return false;
                    }
                    byte[] raw = OrderPreservingBase64.decode(line.getBytes(Charsets.US_ASCII));
                    // Apply one time pad
                    for (int i = 0; i < raw.length; i++) {
                        raw[i] = (byte) (raw[i] ^ onetimepad[padidx++]);
                        if (padidx >= onetimepad.length) {
                            padidx = 0;
                        }
                    }
                    Metadata metadata = new Metadata();
                    try {
                        deserializer.deserialize(metadata, raw);
                        this.current = metadata;
                        return true;
                    } catch (TException te) {
                        LOG.error("", te);
                    }
                } catch (IOException ioe) {
                    LOG.error("", ioe);
                }

                return false;
            }

            @Override
            public Metadata next() {
                if (null != this.current) {
                    Metadata metadata = this.current;
                    this.current = null;
                    return metadata;
                } else {
                    throw new NoSuchElementException();
                }
            }

            @Override
            public void close() throws Exception {
                this.reader.close();
                cache.delete();
            }
        };

        iterators.clear();
        iterators.add(cacheiterator);

        metas = new ArrayList<Metadata>();

        PrintWriter pw = resp.getWriter();

        AtomicReference<Metadata> lastMeta = new AtomicReference<Metadata>(null);
        AtomicLong lastCount = new AtomicLong(0L);

        long fetchtimespan = timespan;

        for (Iterator<Metadata> itermeta : iterators) {
            while (itermeta.hasNext()) {
                metas.add(itermeta.next());

                //
                // Access the data store every 'FETCH_BATCHSIZE' GTS or at the end of each iterator
                //

                if (metas.size() > FETCH_BATCHSIZE || !itermeta.hasNext()) {
                    try (GTSDecoderIterator iterrsc = storeClient.fetch(rtoken, metas, now, fetchtimespan,
                            fromArchive, writeTimestamp)) {
                        GTSDecoderIterator iter = iterrsc;

                        if (unpack) {
                            iter = new UnpackingGTSDecoderIterator(iter, suffix);
                            timespan = Long.MIN_VALUE + 1;
                        }

                        if ("text".equals(format)) {
                            textDump(pw, iter, now, timespan, false, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        } else if ("fulltext".equals(format)) {
                            textDump(pw, iter, now, timespan, true, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        } else if ("raw".equals(format)) {
                            rawDump(pw, iter, dedup, signed, timespan, lastMeta, lastCount, sortMeta);
                        } else if ("wrapper".equals(format)) {
                            wrapperDump(pw, iter, dedup, signed, fetchPSK, timespan, lastMeta, lastCount);
                        } else if ("json".equals(format)) {
                            jsonDump(pw, iter, now, timespan, dedup, signed, lastMeta, lastCount);
                        } else if ("tsv".equals(format)) {
                            tsvDump(pw, iter, now, timespan, false, dedup, signed, lastMeta, lastCount,
                                    sortMeta);
                        } else if ("fulltsv".equals(format)) {
                            tsvDump(pw, iter, now, timespan, true, dedup, signed, lastMeta, lastCount,
                                    sortMeta);
                        } else if ("pack".equals(format)) {
                            packedDump(pw, iter, now, timespan, dedup, signed, lastMeta, lastCount,
                                    maxDecoderLen, suffix, chunksize, sortMeta);
                        } else if ("null".equals(format)) {
                            nullDump(iter);
                        } else {
                            textDump(pw, iter, now, timespan, false, dedup, signed, showAttr, lastMeta,
                                    lastCount, sortMeta);
                        }
                    } catch (Throwable t) {
                        LOG.error("", t);
                        Sensision.update(SensisionConstants.CLASS_WARP_FETCH_ERRORS, Sensision.EMPTY_LABELS, 1);
                        if (showErrors) {
                            pw.println();
                            StringWriter sw = new StringWriter();
                            PrintWriter pw2 = new PrintWriter(sw);
                            t.printStackTrace(pw2);
                            pw2.close();
                            sw.flush();
                            String error = URLEncoder.encode(sw.toString(), "UTF-8");
                            pw.println(Constants.EGRESS_FETCH_ERROR_PREFIX + error);
                        }
                        throw new IOException(t);
                    } finally {
                        if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                            try {
                                ((MetadataIterator) itermeta).close();
                            } catch (Exception e) {
                            }
                        }
                    }

                    //
                    // Reset 'metas'
                    //

                    metas.clear();
                }
            }

            if (!itermeta.hasNext() && (itermeta instanceof MetadataIterator)) {
                try {
                    ((MetadataIterator) itermeta).close();
                } catch (Exception e) {
                }
            }
        }

        Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_FETCH_REQUESTS, labels, 1);
    } catch (Exception e) {
        if (!resp.isCommitted()) {
            resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
            return;
        }
    }
}

From source file:com.unboundid.scim.tools.SCIMQueryRate.java
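Four AtomicLong counters, each initialized to 0L, collect query, resource, error, and duration statistics that the worker threads update and the reporting loop reads or resets.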

/**
 * Performs the actual processing for this tool.  In this case, it gets a
 * connection to the directory server and uses it to perform the requested
 * searches.
 *
 * @return  The result code for the processing that was performed.
 */
@Override()
public ResultCode doToolProcessing() {
    //Initialize the Debugger
    Debug.setEnabled(true);
    Debug.getLogger().addHandler(new ConsoleHandler());
    Debug.getLogger().setUseParentHandlers(false);

    // Determine the random seed to use.
    final Long seed;
    if (randomSeed.isPresent()) {
        seed = Long.valueOf(randomSeed.getValue());
    } else {
        seed = null;
    }

    // Create a value pattern for the filter.
    final ValuePattern filterPattern;
    boolean isQuery = true;
    if (filter.isPresent()) {
        try {
            filterPattern = new ValuePattern(filter.getValue(), seed);
        } catch (ParseException pe) {
            Debug.debugException(pe);
            err(ERR_QUERY_TOOL_BAD_FILTER_PATTERN.get(pe.getMessage()));
            return ResultCode.PARAM_ERROR;
        }
    } else if (resourceId.isPresent()) {
        isQuery = false;
        try {
            filterPattern = new ValuePattern(resourceId.getValue());
        } catch (ParseException pe) {
            Debug.debugException(pe);
            err(ERR_QUERY_TOOL_BAD_RESOURCE_ID_PATTERN.get(pe.getMessage()));
            return ResultCode.PARAM_ERROR;
        }
    } else {
        filterPattern = null;
    }

    // Get the attributes to return.
    final String[] attrs;
    if (attributes.isPresent()) {
        final List<String> attrList = attributes.getValues();
        attrs = new String[attrList.size()];
        attrList.toArray(attrs);
    } else {
        attrs = NO_STRINGS;
    }

    // If the --ratePerSecond option was specified, then limit the rate
    // accordingly.
    FixedRateBarrier fixedRateBarrier = null;
    if (ratePerSecond.isPresent()) {
        final int intervalSeconds = collectionInterval.getValue();
        final int ratePerInterval = ratePerSecond.getValue() * intervalSeconds;

        fixedRateBarrier = new FixedRateBarrier(1000L * intervalSeconds, ratePerInterval);
    }

    // Determine whether to include timestamps in the output and if so what
    // format should be used for them.
    final boolean includeTimestamp;
    final String timeFormat;
    if (timestampFormat.getValue().equalsIgnoreCase("with-date")) {
        includeTimestamp = true;
        timeFormat = "dd/MM/yyyy HH:mm:ss";
    } else if (timestampFormat.getValue().equalsIgnoreCase("without-date")) {
        includeTimestamp = true;
        timeFormat = "HH:mm:ss";
    } else {
        includeTimestamp = false;
        timeFormat = null;
    }

    // Determine whether any warm-up intervals should be run.
    final long totalIntervals;
    final boolean warmUp;
    int remainingWarmUpIntervals = warmUpIntervals.getValue();
    if (remainingWarmUpIntervals > 0) {
        warmUp = true;
        totalIntervals = 0L + numIntervals.getValue() + remainingWarmUpIntervals;
    } else {
        warmUp = false;
        totalIntervals = 0L + numIntervals.getValue();
    }

    // Create the table that will be used to format the output.
    final OutputFormat outputFormat;
    if (csvFormat.isPresent()) {
        outputFormat = OutputFormat.CSV;
    } else {
        outputFormat = OutputFormat.COLUMNS;
    }

    final ColumnFormatter formatter = new ColumnFormatter(includeTimestamp, timeFormat, outputFormat, " ",
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Queries/Sec"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Avg Dur ms"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Resources/Query"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Recent", "Errors/Sec"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Overall", "Queries/Sec"),
            new FormattableColumn(15, HorizontalAlignment.RIGHT, "Overall", "Avg Dur ms"));

    // Create values to use for statistics collection.
    final AtomicLong queryCounter = new AtomicLong(0L);
    final AtomicLong resourceCounter = new AtomicLong(0L);
    final AtomicLong errorCounter = new AtomicLong(0L);
    final AtomicLong queryDurations = new AtomicLong(0L);

    // Determine the length of each interval in milliseconds.
    final long intervalMillis = 1000L * collectionInterval.getValue();

    // We will use Apache's HttpClient library for this tool.
    SSLUtil sslUtil;
    try {
        sslUtil = createSSLUtil();
    } catch (LDAPException e) {
        debugException(e);
        err(e.getMessage());
        return e.getResultCode();
    }

    RegistryBuilder<ConnectionSocketFactory> registryBuilder = RegistryBuilder.create();
    final String schemeName;
    if (sslUtil != null) {
        try {
            SSLConnectionSocketFactory sslConnectionSocketFactory = new SSLConnectionSocketFactory(
                    sslUtil.createSSLContext("TLS"), new NoopHostnameVerifier());
            schemeName = "https";
            registryBuilder.register(schemeName, sslConnectionSocketFactory);
        } catch (GeneralSecurityException e) {
            debugException(e);
            err(ERR_SCIM_TOOL_CANNOT_CREATE_SSL_CONTEXT.get(getExceptionMessage(e)));
            return ResultCode.LOCAL_ERROR;
        }
    } else {
        schemeName = "http";
        registryBuilder.register(schemeName, new PlainConnectionSocketFactory());
    }
    final Registry<ConnectionSocketFactory> socketFactoryRegistry = registryBuilder.build();

    RequestConfig requestConfig = RequestConfig.custom().setConnectionRequestTimeout(30000)
            .setExpectContinueEnabled(true).build();

    SocketConfig socketConfig = SocketConfig.custom().setSoTimeout(30000).setSoReuseAddress(true).build();

    final PoolingHttpClientConnectionManager mgr = new PoolingHttpClientConnectionManager(
            socketFactoryRegistry);
    mgr.setMaxTotal(numThreads.getValue());
    mgr.setDefaultMaxPerRoute(numThreads.getValue());
    mgr.setDefaultSocketConfig(socketConfig);
    mgr.setValidateAfterInactivity(-1);

    ClientConfig jerseyConfig = new ClientConfig();

    jerseyConfig.property(ApacheClientProperties.CONNECTION_MANAGER, mgr);
    jerseyConfig.property(ApacheClientProperties.REQUEST_CONFIG, requestConfig);
    ApacheConnectorProvider connectorProvider = new ApacheConnectorProvider();
    jerseyConfig.connectorProvider(connectorProvider);

    if (authID.isPresent()) {
        try {
            final String password;
            if (authPassword.isPresent()) {
                password = authPassword.getValue();
            } else if (authPasswordFile.isPresent()) {
                password = authPasswordFile.getNonBlankFileLines().get(0);
            } else {
                password = null;
            }

            BasicCredentialsProvider provider = new BasicCredentialsProvider();
            provider.setCredentials(new AuthScope(host.getValue(), port.getValue()),
                    new UsernamePasswordCredentials(authID.getValue(), password));

            jerseyConfig.property(ApacheClientProperties.CREDENTIALS_PROVIDER, provider);
            jerseyConfig.property(ApacheClientProperties.PREEMPTIVE_BASIC_AUTHENTICATION, true);
        } catch (IOException e) {
            Debug.debugException(e);
            err(ERR_QUERY_TOOL_SET_BASIC_AUTH.get(e.getMessage()));
            return ResultCode.LOCAL_ERROR;
        }
    } else if (bearerToken.isPresent()) {
        jerseyConfig.register(new ClientRequestFilter() {
            public void filter(final ClientRequestContext clientRequestContext) throws IOException {
                try {
                    clientRequestContext.getHeaders().add("Authorization", "Bearer " + bearerToken.getValue());
                } catch (Exception ex) {
                    throw new RuntimeException("Unable to add authorization handler", ex);
                }
            }
        });
    }

    // Create the SCIM client to use for the queries.
    final URI uri;
    try {
        final String path;
        if (contextPath.getValue().startsWith("/")) {
            path = contextPath.getValue();
        } else {
            path = "/" + contextPath.getValue();
        }
        uri = new URI(schemeName, null, host.getValue(), port.getValue(), path, null, null);
    } catch (URISyntaxException e) {
        Debug.debugException(e);
        err(ERR_QUERY_TOOL_CANNOT_CREATE_URL.get(e.getMessage()));
        return ResultCode.OTHER;
    }
    final SCIMService service = new SCIMService(uri, jerseyConfig);

    if (xmlFormat.isPresent()) {
        service.setContentType(MediaType.APPLICATION_XML_TYPE);
        service.setAcceptType(MediaType.APPLICATION_XML_TYPE);
    }

    // Retrieve the resource schema.
    final ResourceDescriptor resourceDescriptor;
    try {
        resourceDescriptor = service.getResourceDescriptor(resourceName.getValue(), null);
        if (resourceDescriptor == null) {
            throw new ResourceNotFoundException(
                    "Resource " + resourceName.getValue() + " is not defined by the service provider");
        }
    } catch (SCIMException e) {
        Debug.debugException(e);
        err(ERR_QUERY_TOOL_RETRIEVE_RESOURCE_SCHEMA.get(e.getMessage()));
        return ResultCode.OTHER;
    }

    final SCIMEndpoint<? extends BaseResource> endpoint = service.getEndpoint(resourceDescriptor,
            BaseResource.BASE_RESOURCE_FACTORY);

    // Create the threads to use for the searches.
    final CyclicBarrier barrier = new CyclicBarrier(numThreads.getValue() + 1);
    final QueryRateThread[] threads = new QueryRateThread[numThreads.getValue()];
    for (int i = 0; i < threads.length; i++) {
        threads[i] = new QueryRateThread(i, isQuery, endpoint, filterPattern, attrs, barrier, queryCounter,
                resourceCounter, queryDurations, errorCounter, fixedRateBarrier);
        threads[i].start();
    }

    // Display the table header.
    for (final String headerLine : formatter.getHeaderLines(true)) {
        out(headerLine);
    }

    // Indicate that the threads can start running.
    try {
        barrier.await();
    } catch (Exception e) {
        Debug.debugException(e);
    }
    long overallStartTime = System.nanoTime();
    long nextIntervalStartTime = System.currentTimeMillis() + intervalMillis;

    boolean setOverallStartTime = false;
    long lastDuration = 0L;
    long lastNumEntries = 0L;
    long lastNumErrors = 0L;
    long lastNumSearches = 0L;
    long lastEndTime = System.nanoTime();
    for (long i = 0; i < totalIntervals; i++) {
        final long startTimeMillis = System.currentTimeMillis();
        final long sleepTimeMillis = nextIntervalStartTime - startTimeMillis;
        nextIntervalStartTime += intervalMillis;
        try {
            if (sleepTimeMillis > 0) {
                Thread.sleep(sleepTimeMillis);
            }
        } catch (Exception e) {
            Debug.debugException(e);
        }

        final long endTime = System.nanoTime();
        final long intervalDuration = endTime - lastEndTime;

        final long numSearches;
        final long numEntries;
        final long numErrors;
        final long totalDuration;
        if (warmUp && (remainingWarmUpIntervals > 0)) {
            numSearches = queryCounter.getAndSet(0L);
            numEntries = resourceCounter.getAndSet(0L);
            numErrors = errorCounter.getAndSet(0L);
            totalDuration = queryDurations.getAndSet(0L);
        } else {
            numSearches = queryCounter.get();
            numEntries = resourceCounter.get();
            numErrors = errorCounter.get();
            totalDuration = queryDurations.get();
        }

        final long recentNumSearches = numSearches - lastNumSearches;
        final long recentNumEntries = numEntries - lastNumEntries;
        final long recentNumErrors = numErrors - lastNumErrors;
        final long recentDuration = totalDuration - lastDuration;

        final double numSeconds = intervalDuration / 1000000000.0d;
        final double recentSearchRate = recentNumSearches / numSeconds;
        final double recentErrorRate = recentNumErrors / numSeconds;

        final double recentAvgDuration;
        final double recentEntriesPerSearch;
        if (recentNumSearches > 0L) {
            recentEntriesPerSearch = 1.0d * recentNumEntries / recentNumSearches;
            recentAvgDuration = 1.0d * recentDuration / recentNumSearches / 1000000;
        } else {
            recentEntriesPerSearch = 0.0d;
            recentAvgDuration = 0.0d;
        }

        if (warmUp && (remainingWarmUpIntervals > 0)) {
            out(formatter.formatRow(recentSearchRate, recentAvgDuration, recentEntriesPerSearch,
                    recentErrorRate, "warming up", "warming up"));

            remainingWarmUpIntervals--;
            if (remainingWarmUpIntervals == 0) {
                out(INFO_QUERY_TOOL_WARM_UP_COMPLETED.get());
                setOverallStartTime = true;
            }
        } else {
            if (setOverallStartTime) {
                overallStartTime = lastEndTime;
                setOverallStartTime = false;
            }

            final double numOverallSeconds = (endTime - overallStartTime) / 1000000000.0d;
            final double overallSearchRate = numSearches / numOverallSeconds;

            final double overallAvgDuration;
            if (numSearches > 0L) {
                overallAvgDuration = 1.0d * totalDuration / numSearches / 1000000;
            } else {
                overallAvgDuration = 0.0d;
            }

            out(formatter.formatRow(recentSearchRate, recentAvgDuration, recentEntriesPerSearch,
                    recentErrorRate, overallSearchRate, overallAvgDuration));

            lastNumSearches = numSearches;
            lastNumEntries = numEntries;
            lastNumErrors = numErrors;
            lastDuration = totalDuration;
        }

        lastEndTime = endTime;
    }

    // Stop all of the threads.
    ResultCode resultCode = ResultCode.SUCCESS;
    for (final QueryRateThread t : threads) {
        t.signalShutdown();
    }

    // Interrupt any blocked threads after a grace period.
    final WakeableSleeper sleeper = new WakeableSleeper();
    sleeper.sleep(1000);
    mgr.shutdown();

    for (final QueryRateThread t : threads) {
        final ResultCode r = t.waitForShutdown();
        if (resultCode == ResultCode.SUCCESS) {
            resultCode = r;
        }
    }

    return resultCode;
}

From source file:jduagui.Controller.java
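When a file extension is first encountered, its count and cumulative size are created as new AtomicLong(1) and new AtomicLong(attrs.size()) inside a new Extension object.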

public static void getExtensions(String startPath, Map<String, Extension> exts) throws IOException {
    final AtomicReference<String> extension = new AtomicReference<>("");
    final File f = new File(startPath);
    final String str = "";
    Path path = Paths.get(startPath);

    Files.walkFileTree(path, new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            storageCache.put(file.toAbsolutePath().toString(), attrs.size());
            extension.set(FilenameUtils.getExtension(file.toAbsolutePath().toString()));

            if (extension.get().equals(str)) {
                if (exts.containsKey(noExt)) {
                    exts.get(noExt).countIncrement();
                    exts.get(noExt).increaseSize(attrs.size());
                } else {
                    exts.put(noExt, new Extension(new AtomicLong(1), new AtomicLong(attrs.size())));
                }
            } else {

                if (exts.containsKey(extension.get())) {
                    exts.get(extension.get()).countIncrement();
                    exts.get(extension.get()).increaseSize(attrs.size());
                } else {
                    exts.put(extension.get(), new Extension(new AtomicLong(1), new AtomicLong(attrs.size())));
                }
            }
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException {
            return FileVisitResult.CONTINUE;
        }
    });
}