Example usage for java.io IOException addSuppressed

List of usage examples for java.io IOException addSuppressed

Introduction

On this page you can find example usages of java.io.IOException#addSuppressed (a method inherited from java.lang.Throwable).

Prototype

public final synchronized void addSuppressed(Throwable exception) 

Source Link

Document

Appends the specified exception to the exceptions that were suppressed in order to deliver this exception.

Usage

From source file:org.spf4j.perf.tsdb.TimeSeriesDatabase.java

/**
 * Flushes all buffered data fragments to the database file under an exclusive file lock,
 * updating each table's fragment chain, then calls {@link #sync()}.
 *
 * @throws IOException if writing the fragments or releasing the file lock fails
 */
public void flush() throws IOException {
    synchronized (path) {
        List<Map.Entry<String, DataFragment>> lwriteDataFragments;
        synchronized (writeDataFragments) {
            if (writeDataFragments.isEmpty()) {
                return;
            }
            // Snapshot and clear under the monitor so concurrent writers can keep buffering.
            lwriteDataFragments = new ArrayList<>(writeDataFragments.entrySet());
            writeDataFragments.clear();
        }
        FileLock lock = ch.lock();
        try {
            for (Map.Entry<String, DataFragment> entry : lwriteDataFragments) {
                DataFragment writeDataFragment = entry.getValue();
                String groupName = entry.getKey();
                // Append the fragment at the end of the file.
                file.seek(file.length());
                writeDataFragment.setLocation(file.getFilePointer());
                writeDataFragment.writeTo(file);
                TSTable colInfo = tables.get(groupName);
                colInfo = new TSTable(file, colInfo.getLocation()); // reread colInfo
                tables.put(groupName, colInfo); // update colInfo
                final long lastDataFragment = colInfo.getLastDataFragment();
                final long location = writeDataFragment.getLocation();
                // Link the new fragment into the table's chain (or start the chain).
                if (lastDataFragment != 0) {
                    DataFragment.setNextDataFragment(lastDataFragment, location, file);
                } else {
                    colInfo.setFirstDataFragment(location, file);
                }
                colInfo.setLastDataFragment(location, file);
            }
            sync();
        } catch (IOException | RuntimeException e) {
            // Release the lock without masking the primary failure. The previous code
            // rethrew e inside the try; when e was an IOException it was caught by its
            // own handler and ex.addSuppressed(e) self-suppressed, which throws
            // IllegalArgumentException and replaces the original exception.
            try {
                lock.release();
            } catch (IOException ex) {
                e.addSuppressed(ex);
            }
            throw e;
        }
        lock.release();
    }
}

From source file:org.spf4j.perf.tsdb.TimeSeriesDatabase.java

/**
 * Opens (or creates) a time series database file.
 *
 * @param pathToDatabaseFile file system path of the database file
 * @param isWrite true to open read-write (exclusive lock), false for read-only (shared lock)
 * @param metaData metadata bytes written into a newly created header
 * @throws IOException if the file cannot be opened, locked, or its header read/written
 */
public TimeSeriesDatabase(final String pathToDatabaseFile, final boolean isWrite, final byte... metaData)
        throws IOException {
    file = new RandomAccessFile(pathToDatabaseFile, isWrite ? "rw" : "r");
    // uniques per process string for sync purposes.
    this.path = INTERNER.intern(new File(pathToDatabaseFile).getPath());
    tables = new ConcurrentHashMap<>();
    writeDataFragments = new HashMap<>();
    // read or create header
    synchronized (path) {
        this.ch = file.getChannel();
        FileLock lock;
        if (isWrite) {
            lock = ch.lock();
        } else {
            lock = ch.lock(0, Long.MAX_VALUE, true);
        }
        try {
            if (file.length() == 0) {
                // Brand-new file: write header + table of contents.
                this.header = new Header(VERSION, metaData);
                this.header.writeTo(file);
                this.toc = new TableOfContents(file.getFilePointer());
                this.toc.writeTo(file);
            } else {
                this.header = new Header(file);
                this.toc = new TableOfContents(file);
            }
        } catch (IOException | RuntimeException e) {
            // Release the lock without masking the primary failure. The previous code
            // rethrew e inside the try block; an IOException was then caught by its own
            // handler, and ex.addSuppressed(e) self-suppressed — which throws
            // IllegalArgumentException per the Throwable contract.
            try {
                lock.release();
            } catch (IOException ex) {
                e.addSuppressed(ex);
            }
            throw e;
        }
        lock.release();
        // Re-acquire a shared lock just to read the table infos.
        lock = ch.lock(0, Long.MAX_VALUE, true);
        try {
            readTableInfos();
        } catch (IOException | RuntimeException e) {
            try {
                lock.release();
            } catch (IOException ex) {
                e.addSuppressed(ex);
            }
            throw e;
        }
        lock.release();
    }
}

From source file:org.spf4j.perf.tsdb.TimeSeriesDatabase.java

/**
 * Reads measurements from the fragment chain between two fragment file locations.
 *
 * @param startTime start time, inclusive (epoch millis)
 * @param endTime end time, inclusive (epoch millis)
 * @param startAtFragment file location of the first data fragment to scan (0 = none)
 * @param endAtFragment file location of the last data fragment to scan
 * @param skipFirst if true, the first fragment is skipped
 * @return the time series whose timestamps fall within [startTime, endTime]
 * @throws IOException if reading the fragments or releasing the file lock fails
 */
private TimeSeries read(final long startTime, final long endTime, final long startAtFragment,
        final long endAtFragment, final boolean skipFirst) throws IOException {
    synchronized (path) {
        TLongArrayList timeStamps = new TLongArrayList();
        List<long[]> data = new ArrayList<>();
        if (startAtFragment > 0) {
            FileLock lock = ch.lock(0, Long.MAX_VALUE, true);
            try {
                DataFragment frag;
                long nextFragmentLocation = startAtFragment;
                boolean last = false;
                boolean psFirst = skipFirst;
                do {
                    if (nextFragmentLocation == endAtFragment) {
                        last = true;
                    }
                    file.seek(nextFragmentLocation);
                    frag = new DataFragment(file);
                    if (psFirst) {
                        psFirst = false;
                    } else {
                        long fragStartTime = frag.getStartTimeMillis();
                        if (fragStartTime >= startTime) {
                            TIntArrayList fragTimestamps = frag.getTimestamps();
                            // Collect timestamps up to endTime; nr counts how many qualify.
                            int nr = 0;
                            for (int i = 0; i < fragTimestamps.size(); i++) {
                                long ts = fragStartTime + fragTimestamps.get(i);
                                if (ts <= endTime) {
                                    timeStamps.add(ts);
                                    nr++;
                                } else {
                                    break;
                                }
                            }
                            // Copy exactly nr data rows so `data` stays aligned with
                            // `timeStamps`. (The previous loop incremented nr instead of
                            // i, so i stayed 0, every row was copied, and the two lists
                            // went out of sync.)
                            int copied = 0;
                            for (long[] d : frag.getData()) {
                                if (copied >= nr) {
                                    break;
                                }
                                data.add(d);
                                copied++;
                            }
                            if (fragTimestamps.size() > nr) {
                                // endTime fell inside this fragment; stop scanning.
                                break;
                            }
                        }
                    }
                    nextFragmentLocation = frag.getNextDataFragment();
                } while (nextFragmentLocation > 0 && !last);
            } catch (IOException | RuntimeException e) {
                // Release the read lock without risking self-suppression: the previous
                // code rethrew e inside the try and could call e.addSuppressed(e).
                try {
                    lock.release();
                } catch (IOException ex) {
                    e.addSuppressed(ex);
                }
                throw e;
            }
            lock.release();
        }
        return new TimeSeries(timeStamps.toArray(), data.toArray(new long[data.size()][]));
    }
}

From source file:com.android.builder.internal.packaging.sign.SignatureExtension.java

/**
 * Computes the digital signature of an array of data.
 *
 * @param data the data/*ww  w  . j a v a  2  s.c  o m*/
 * @return the digital signature
 * @throws IOException failed to read/write signature data
 * @throws CertificateEncodingException failed to sign the data
 * @throws OperatorCreationException failed to sign the data
 * @throws CMSException failed to sign the data
 */
private byte[] computePkcs7Signature(@NonNull byte[] data)
        throws IOException, CertificateEncodingException, OperatorCreationException, CMSException {
    CMSProcessableByteArray cmsData = new CMSProcessableByteArray(data);

    ArrayList<X509Certificate> certList = new ArrayList<>();
    certList.add(mCertificate);
    JcaCertStore certs = new JcaCertStore(certList);

    CMSSignedDataGenerator gen = new CMSSignedDataGenerator();
    String signatureAlgName = mSignatureAlgorithm.signatureAlgorithmName(mDigestAlgorithm);
    ContentSigner shaSigner = new JcaContentSignerBuilder(signatureAlgName).build(mPrivateKey);
    gen.addSignerInfoGenerator(
            new JcaSignerInfoGeneratorBuilder(new JcaDigestCalculatorProviderBuilder().build())
                    .setDirectSignature(true).build(shaSigner, mCertificate));
    gen.addCertificates(certs);
    CMSSignedData sigData = gen.generate(cmsData, false);

    ByteArrayOutputStream outputBytes = new ByteArrayOutputStream();

    /*
     * DEROutputStream is not closeable! OMG!
     */
    DEROutputStream dos = null;
    try (ASN1InputStream asn1 = new ASN1InputStream(sigData.getEncoded())) {
        dos = new DEROutputStream(outputBytes);
        dos.writeObject(asn1.readObject());

        DEROutputStream toClose = dos;
        dos = null;
        toClose.close();
    } catch (IOException e) {
        if (dos != null) {
            try {
                dos.close();
            } catch (IOException ee) {
                e.addSuppressed(ee);
            }
        }
    }

    return outputBytes.toByteArray();
}

From source file:hudson.cli.CLI.java

/**
 * Connects to Jenkins, first via the CLI TCP port, falling back to HTTP.
 *
 * @param factory carries the Jenkins URL, authorization, proxy tunnel, and optional executor
 * @throws IOException if both connection attempts fail, or the protocol versions mismatch
 * @throws InterruptedException if interrupted while establishing the channel
 * @deprecated Specific to {@link Mode#REMOTING}.
 */
@Deprecated
/*package*/ CLI(CLIConnectionFactory factory) throws IOException, InterruptedException {
    URL jenkins = factory.jenkins;
    this.httpsProxyTunnel = factory.httpsProxyTunnel;
    this.authorization = factory.authorization;
    ExecutorService exec = factory.exec;

    // If no executor was supplied, we create (and therefore own) one.
    ownsPool = exec == null;
    pool = exec != null ? exec
            : Executors
                    .newCachedThreadPool(new NamingThreadFactory(Executors.defaultThreadFactory(), "CLI.pool"));

    try {
        Channel _channel;
        try {
            _channel = connectViaCliPort(jenkins, getCliTcpPort(jenkins));
        } catch (IOException e) {
            LOGGER.log(Level.FINE, "Failed to connect via CLI port. Falling back to HTTP", e);
            try {
                _channel = connectViaHttp(jenkins);
            } catch (IOException e2) {
                // Keep the CLI-port failure as the primary exception; record the
                // HTTP fallback failure as suppressed.
                e.addSuppressed(e2);
                throw e;
            }
        }
        this.channel = _channel;

        // execute the command
        entryPoint = (CliEntryPoint) _channel.waitForRemoteProperty(CliEntryPoint.class.getName());

        if (entryPoint.protocolVersion() != CliEntryPoint.VERSION) {
            throw new IOException(Messages.CLI_VersionMismatch());
        }
    } catch (IOException | InterruptedException | RuntimeException e) {
        // Previously a failed connection leaked the thread pool this constructor
        // created; only shut it down when we own it.
        if (ownsPool) {
            pool.shutdown();
        }
        throw e;
    }
}

From source file:net.oneandone.stool.Start.java

/**
 * Ensures the Tomcat distribution for the given version is downloaded and its
 * lib/bin directories unpacked under the session home.
 *
 * @param version the Tomcat version to provision (e.g. "7.0.x")
 * @return the downloaded tar.gz file node
 * @throws IOException if the download or extraction fails
 */
public FileNode tomcatOpt(String version) throws IOException {
    String name = tomcatName(version);
    FileNode download = session.home.join("downloads", name + ".tar.gz");
    if (!download.exists()) {
        console.info.println("downloading tomcat ...");
        try {
            // NOTE(review): the "tomcat-7" path segment is hard-coded; confirm this
            // method is only used for Tomcat 7 versions.
            downloadFile(console,
                    "http://archive.apache.org/dist/tomcat/tomcat-7/v" + version + "/bin/" + name + ".tar.gz",
                    download);
        } catch (IOException e) {
            // Chain the download failure as the cause; the previous code attached it
            // via addSuppressed, which is meant for cleanup failures and hides the
            // root cause from getCause()-based inspection.
            throw new IOException(
                    "Cannot download Tomcat " + version + ". Please provide it manually at " + download, e);
        }
        download.checkFile();
    }
    FileNode base = session.home.join("tomcat/" + name);
    if (!base.exists()) {
        // Only lib and bin are needed; skip the rest of the archive.
        tar(base.getParent(), "zxf", download.getAbsolute(), name + "/lib", name + "/bin");
        base.checkDirectory();
    }
    return download;
}

From source file:org.ops4j.pax.url.mvn.internal.AetherBasedResolver.java

/**
 * Resolves an artifact to a local file: first against the default (local)
 * repositories when the version is fixed, then against the remote repositories.
 *
 * @param defaultRepos local repositories tried first for exact versions
 * @param remoteRepos remote repositories used as the fallback
 * @param artifact the artifact to resolve (extension defaults to "jar")
 * @return the resolved artifact file
 * @throws IOException if resolution ultimately fails
 */
private File resolve(List<LocalRepository> defaultRepos, List<RemoteRepository> remoteRepos, Artifact artifact)
        throws IOException {

    // Normalize a missing extension to "jar".
    if (artifact.getExtension().isEmpty()) {
        artifact = new DefaultArtifact(artifact.getGroupId(), artifact.getArtifactId(),
                artifact.getClassifier(), "jar", artifact.getVersion());
    }

    // First pass: only exact versions can be satisfied from the default repositories.
    try {
        VersionConstraint constraint = new GenericVersionScheme().parseVersionConstraint(artifact.getVersion());
        if (constraint.getVersion() != null) {
            for (LocalRepository localRepo : defaultRepos) {
                RepositorySystemSession localSession = newSession(localRepo);
                try {
                    ArtifactRequest localRequest = new ArtifactRequest(artifact, null, null);
                    return m_repoSystem.resolveArtifact(localSession, localRequest).getArtifact().getFile();
                } catch (ArtifactResolutionException ignored) {
                    // Not available here; try the next default repository.
                } finally {
                    releaseSession(localSession);
                }
            }
        }
    } catch (InvalidVersionSpecificationException ignored) {
        // The version string was produced upstream; this should not happen.
    }

    // Second pass: resolve against the remote repositories.
    RepositorySystemSession session = newSession(null);
    try {
        artifact = resolveLatestVersionRange(session, remoteRepos, artifact);
        ArtifactRequest remoteRequest = new ArtifactRequest(artifact, remoteRepos, null);
        return m_repoSystem.resolveArtifact(session, remoteRequest).getArtifact().getFile();
    } catch (ArtifactResolutionException e) {
        // we know there's one ArtifactResult, because there was one ArtifactRequest
        ArtifactResolutionException original = new ArtifactResolutionException(e.getResults(),
                "Error resolving artifact " + artifact.toString(), null);
        original.setStackTrace(e.getStackTrace());

        // Summarize every underlying failure in the message and keep each one
        // attached as a suppressed exception.
        List<Exception> causes = e.getResult().getExceptions();
        List<String> messages = new ArrayList<>(causes.size());
        for (Exception cause : causes) {
            messages.add(cause.getMessage() == null ? cause.getClass().getName() : cause.getMessage());
        }
        IOException exception = new IOException(original.getMessage() + ": " + messages, original);
        for (Exception cause : causes) {
            exception.addSuppressed(cause);
        }
        LOG.warn(exception.getMessage(), exception);

        throw exception;
    } catch (RepositoryException e) {
        throw new IOException("Error resolving artifact " + artifact.toString(), e);
    } finally {
        releaseSession(session);
    }
}

From source file:org.apache.druid.data.input.impl.prefetch.PrefetchableTextFilesFirehoseFactory.java

@Override
public Firehose connect(StringInputRowParser firehoseParser, @Nullable File temporaryDirectory)
        throws IOException {
    // Lazily materialize the immutable object list on first connect.
    if (objects == null) {
        objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
    }

    // A usable temporary directory is only required when caching or prefetching is enabled.
    if (cacheManager.isEnabled() || prefetchConfig.getMaxFetchCapacityBytes() > 0) {
        Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
        Preconditions.checkArgument(temporaryDirectory.exists(), "temporaryDirectory[%s] does not exist",
                temporaryDirectory);
        Preconditions.checkArgument(temporaryDirectory.isDirectory(),
                "temporaryDirectory[%s] is not a directory", temporaryDirectory);
    }

    LOG.info("Create a new firehose for [%d] objects", objects.size());

    // fetchExecutor is responsible for background data fetching
    final ExecutorService fetchExecutor = Execs.singleThreaded("firehose_fetch_%d");
    final FileFetcher<T> fetcher = new FileFetcher<T>(cacheManager, objects, fetchExecutor, temporaryDirectory,
            prefetchConfig, new ObjectOpenFunction<T>() {
                // Open the object's stream from the beginning.
                @Override
                public InputStream open(T object) throws IOException {
                    return openObjectStream(object);
                }

                // Open the object's stream from a byte offset — presumably used to
                // resume partial fetches; confirm against FileFetcher's retry logic.
                @Override
                public InputStream open(T object, long start) throws IOException {
                    return openObjectStream(object, start);
                }
            }, getRetryCondition(), getMaxFetchRetry());

    return new FileIteratingFirehose(new Iterator<LineIterator>() {
        @Override
        public boolean hasNext() {
            return fetcher.hasNext();
        }

        @Override
        public LineIterator next() {
            if (!hasNext()) {
                throw new NoSuchElementException();
            }

            final OpenedObject<T> openedObject = fetcher.next();
            try {
                // The resource closer releases the fetched object when the returned
                // line iterator is closed.
                return new ResourceCloseableLineIterator(new InputStreamReader(
                        wrapObjectStream(openedObject.getObject(), openedObject.getObjectStream()),
                        StandardCharsets.UTF_8), openedObject.getResourceCloser());
            } catch (IOException e) {
                // Best-effort cleanup; a failure to close must not mask the original error.
                try {
                    openedObject.getResourceCloser().close();
                } catch (Throwable t) {
                    e.addSuppressed(t);
                }
                throw new RuntimeException(e);
            }
        }
    }, firehoseParser, () -> {
        // On firehose close: stop background fetching and wait (bounded by the
        // fetch timeout) for the executor to terminate.
        fetchExecutor.shutdownNow();
        try {
            Preconditions.checkState(
                    fetchExecutor.awaitTermination(prefetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS));
        } catch (InterruptedException e) {
            // Restore the interrupt flag before converting to an unchecked failure.
            Thread.currentThread().interrupt();
            throw new ISE("Failed to shutdown fetch executor during close");
        }
    });
}

From source file:org.apache.druid.java.util.common.CompressionUtils.java

/**
 * Wraps {@code in} in a decompressing stream chosen by {@code fileName}'s suffix.
 * Unrecognized suffixes pass the stream through unchanged; a {@code .zip} name
 * exposes only the first entry of the archive.
 */
public static InputStream decompress(final InputStream in, final String fileName) throws IOException {
    if (fileName.endsWith(GZ_SUFFIX)) {
        return gzipInputStream(in);
    }
    if (fileName.endsWith(BZ2_SUFFIX)) {
        return new BZip2CompressorInputStream(in, true);
    }
    if (fileName.endsWith(XZ_SUFFIX)) {
        return new XZCompressorInputStream(in, true);
    }
    if (fileName.endsWith(SNAPPY_SUFFIX)) {
        return new FramedSnappyCompressorInputStream(in);
    }
    if (fileName.endsWith(ZSTD_SUFFIX)) {
        return new ZstdCompressorInputStream(in);
    }
    if (fileName.endsWith(ZIP_SUFFIX)) {
        // This reads the first file in the archive.
        final ZipInputStream zipIn = new ZipInputStream(in, StandardCharsets.UTF_8);
        try {
            if (zipIn.getNextEntry() == null) {
                // No files in the archive - return an empty stream.
                zipIn.close();
                return new ByteArrayInputStream(new byte[0]);
            }
            return zipIn;
        } catch (IOException e) {
            // Close best-effort; keep the original exception primary.
            try {
                zipIn.close();
            } catch (IOException closeFailure) {
                e.addSuppressed(closeFailure);
            }
            throw e;
        }
    }
    return in;
}

From source file:org.apache.druid.storage.cassandra.CassandraDataSegmentPuller.java

/**
 * Pulls the segment index identified by {@code key} from Cassandra chunked storage
 * into {@code outDir}: downloads index.zip (with retries), unzips it, and cleans up
 * the temporary archive.
 *
 * @param key storage key of the segment index
 * @param outDir local directory to unpack into (created if missing)
 * @return the file copy result of the unzip
 * @throws SegmentLoadingException if the directory cannot be created, the download
 *         fails after retries, or the unzip fails
 */
FileUtils.FileCopyResult getSegmentFiles(final String key, final File outDir) throws SegmentLoadingException {
    log.info("Pulling index from C* at path[%s] to outDir[%s]", key, outDir);
    try {
        org.apache.commons.io.FileUtils.forceMkdir(outDir);
    } catch (IOException e) {
        // Previously thrown with an empty message; include the directory for diagnosis.
        throw new SegmentLoadingException(e, "Unable to create directory [%s]", outDir);
    }

    long startTime = System.currentTimeMillis();
    final File tmpFile = new File(outDir, "index.zip");
    log.info("Pulling to temporary local cache [%s]", tmpFile.getAbsolutePath());

    try {
        // The retry result was previously stored in an unused local; only the
        // side effect (the downloaded tmpFile) matters.
        RetryUtils.retry(() -> {
            try (OutputStream os = new FileOutputStream(tmpFile)) {
                ChunkedStorage.newReader(indexStorage, key, os).withBatchSize(BATCH_SIZE)
                        .withConcurrencyLevel(CONCURRENCY).call();
            }
            return new FileUtils.FileCopyResult(tmpFile);
        }, Predicates.alwaysTrue(), 10);
    } catch (Exception e) {
        throw new SegmentLoadingException(e, "Unable to copy key [%s] to file [%s]", key,
                tmpFile.getAbsolutePath());
    }
    try {
        final FileUtils.FileCopyResult result = CompressionUtils.unzip(tmpFile, outDir);
        log.info("Pull of file[%s] completed in %,d millis (%s bytes)", key,
                System.currentTimeMillis() - startTime, result.size());
        return result;
    } catch (Exception e) {
        // Best-effort cleanup of the partially unpacked directory.
        try {
            org.apache.commons.io.FileUtils.deleteDirectory(outDir);
        } catch (IOException e1) {
            log.error(e1, "Error clearing segment directory [%s]", outDir.getAbsolutePath());
            e.addSuppressed(e1);
        }
        // Route the cause's message through "%s" so a literal '%' in it cannot be
        // misinterpreted as a format specifier (the constructor takes a format
        // string, as the download catch above shows).
        throw new SegmentLoadingException(e, "%s", e.getMessage());
    } finally {
        if (!tmpFile.delete()) {
            log.warn("Could not delete cache file at [%s]", tmpFile.getAbsolutePath());
        }
    }
}