Example usage for java.nio.file Files newByteChannel

List of usage examples for java.nio.file Files newByteChannel

Introduction

On this page you can find example usages for java.nio.file Files newByteChannel.

Prototype

public static SeekableByteChannel newByteChannel(Path path, Set<? extends OpenOption> options,
        FileAttribute<?>... attrs) throws IOException 
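
Note that this prototype is the Set-based overload; most of the examples below use the varargs convenience form newByteChannel(Path, OpenOption...). A minimal sketch of the Set-based call, with a hypothetical path and POSIX permissions (the attribute is only honored on POSIX file systems), might look like this:

import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.EnumSet;
import java.util.Set;

public class NewByteChannelSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical target path; adjust for your environment.
        Path path = Paths.get("/tmp/example.dat");

        // Open options passed as a Set, matching the prototype above.
        Set<StandardOpenOption> options = EnumSet.of(StandardOpenOption.CREATE,
                StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING);

        // rw------- permissions; only applied on POSIX file systems.
        Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rw-------");
        FileAttribute<Set<PosixFilePermission>> attr = PosixFilePermissions.asFileAttribute(perms);

        try (SeekableByteChannel channel = Files.newByteChannel(path, options, attr)) {
            channel.write(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
        }
    }
}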

Document

Opens or creates a file, returning a seekable byte channel to access the file.
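
Because the returned SeekableByteChannel keeps an explicit position, it supports random-access reads and writes. A minimal sketch of a positioned read, assuming a hypothetical file /tmp/example.dat that already exists and is at least 20 bytes long, might be:

import java.nio.ByteBuffer;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class PositionedReadSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical input file; assumed to exist and to be at least 20 bytes long.
        Path path = Paths.get("/tmp/example.dat");

        // The varargs overload is used here, as in most of the examples below.
        try (SeekableByteChannel channel = Files.newByteChannel(path, StandardOpenOption.READ)) {
            channel.position(10);                    // jump to byte offset 10
            ByteBuffer buffer = ByteBuffer.allocate(10);
            int read = channel.read(buffer);         // read up to 10 bytes from that offset
            System.out.println("Read " + read + " bytes starting at position 10");
        }
    }
}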

Usage

From source file:Main.java

public static void writeUrlToFileNIO(String urlToRead, String folderToWrite, String fileName)
        throws MalformedURLException, IOException {
    URL urlIn = new URL(urlToRead);
    File folderOut = Paths.get(folderToWrite).toFile();
    if (!(folderOut.exists() || folderOut.mkdirs())) {
        throw new RuntimeException("could not create folder " + folderToWrite);
    }
    Path pathOut = Paths.get(folderToWrite, fileName);
    try (ReadableByteChannel in = Channels.newChannel(new BufferedInputStream(urlIn.openStream()));
            WritableByteChannel out = Files.newByteChannel(pathOut, CREATE, WRITE)) {
        transfer(in, out);
    }
}
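
The transfer(in, out) helper called above is not shown in this excerpt. A plausible channel-to-channel copy loop (an assumption, not the project's original code) that could live in the same Main.java, using java.nio.ByteBuffer alongside the channel types the snippet already imports, might be:

// Hypothetical implementation of the transfer(in, out) helper used above;
// the project's actual code is not shown in this excerpt.
public static void transfer(ReadableByteChannel in, WritableByteChannel out) throws IOException {
    final ByteBuffer buffer = ByteBuffer.allocateDirect(8192);
    while (in.read(buffer) != -1) {
        buffer.flip();                  // switch the buffer to draining mode
        while (buffer.hasRemaining()) {
            out.write(buffer);          // drain everything that was read
        }
        buffer.clear();                 // switch back to filling mode
    }
}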

From source file:org.ulyssis.ipp.reader.Reader.java

/**
 * Create a new reader and connect to Redis.
 *
 * Options are passed in, rather than
 * accessed through a singleton or the like, to improve testability
 * and modularity, and to prevent hidden dependencies and
 * eventual threading issues.
 * 
 * @param options
 *           The command line options to use for this reader.
 */
public Reader(ReaderOptions options) {
    this.options = options;
    this.readerConfig = Config.getCurrentConfig().getReader(options.getId());
    this.llrpReader = new LLRPReader(this::messageReceived, this::errorOccurred);

    if (readerConfig.getType() == ReaderConfig.Type.SIMULATOR) {
        executorService = Executors.newSingleThreadScheduledExecutor();
    } else {
        executorService = null;
    }

    if (options.getNoRedis()) {
        LOG.info("Not using Redis, setting initial update count to 0.");
        this.updateCount = 0L;
        this.jedis = null;
    } else {
        this.jedis = JedisHelper.get(readerConfig.getURI());
        try {
            this.updateCount = jedis.llen("updates");
        } catch (JedisConnectionException e) {
            LOG.error("Couldn't connect to Jedis when getting update count. Setting 0 instead.", e);
            this.updateCount = 0L; // TODO: Is 0 appropriate?
        }
    }
    String statusChannel = Config.getCurrentConfig().getStatusChannel();
    this.statusReporter = new StatusReporter(readerConfig.getURI(), statusChannel);
    String controlChannel = Config.getCurrentConfig().getControlChannel();
    this.commandProcessor = new CommandProcessor(readerConfig.getURI(), controlChannel, statusReporter);
    commandProcessor.addHandler(new PingHandler());
    this.updateChannel = JedisHelper.dbLocalChannel(Config.getCurrentConfig().getUpdateChannel(),
            readerConfig.getURI());

    options.getReplayFile().ifPresent(replayFile -> {
        try {
            LOG.info("Opening replay file: {}", replayFile);
            ByteChannel channel = Files.newByteChannel(replayFile, StandardOpenOption.APPEND,
                    StandardOpenOption.CREATE);
            this.replayChannel = Optional.of(channel);
        } catch (IOException e) {
            LOG.error("Couldn't open channel for logging to replay file: {}", replayFile, e);
        }
    });

    this.lastUpdateForTag = new HashMap<>();
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

@Test
public void getContents() throws IOException, URISyntaxException, XmlProcessingException, InterruptedException {
    final String bucketName = "test_get_contents";

    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);
        loadBookTestData(client, bucketName);

        final Ds3ClientHelpers.Job job = HELPERS.startReadAllJob(bucketName);

        final UUID jobId = job.getJobId();

        job.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                final Path filePath = Files.createTempFile("ds3", key);
                return Files.newByteChannel(filePath, StandardOpenOption.DELETE_ON_CLOSE,
                        StandardOpenOption.WRITE);
            }
        });

        assertThat(JobStatusHelper.getJobStatusWithRetries(client, jobId, JobStatus.COMPLETED),
                is(JobStatus.COMPLETED));

    } finally {
        deleteAllContents(client, bucketName);
    }
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

@Test
public void partialObjectGetOverChunkBoundry() throws IOException, XmlProcessingException {
    final String bucketName = "partialGetOverBoundry";
    final String testFile = "testObject.txt";
    final Path filePath = Files.createTempFile("ds3", testFile);
    final int seed = 12345;
    LOG.info("Test file: " + filePath.toAbsolutePath());
    try {
        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final int objectSize = PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES * 2;

        final List<Ds3Object> objs = Lists.newArrayList(new Ds3Object(testFile, objectSize));

        final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, objs, WriteJobOptions.create()
                .withMaxUploadSize(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES));

        putJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                final byte[] randomData = IOUtils.toByteArray(new RandomDataInputStream(seed, objectSize));
                final ByteBuffer randomBuffer = ByteBuffer.wrap(randomData);

                final ByteArraySeekableByteChannel channel = new ByteArraySeekableByteChannel(objectSize);
                channel.write(randomBuffer);

                return channel;

            }
        });

        final List<Ds3Object> partialObjectGet = Lists.newArrayList();
        partialObjectGet.add(new PartialDs3Object(testFile,
                Range.byPosition(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES - 100,
                        PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES + 99)));

        final Ds3ClientHelpers.Job getJob = HELPERS.startReadJob(bucketName, partialObjectGet);

        getJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                return Files.newByteChannel(filePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
            }
        });

        assertThat(Files.size(filePath), is(200L));

    } finally {
        Files.delete(filePath);
        deleteAllContents(client, bucketName);
    }
}

From source file:com.spectralogic.ds3client.integration.Smoke_Test.java

@Test
public void partialGetWithBookOverChunkBoundry()
        throws IOException, XmlProcessingException, URISyntaxException {
    final String bucketName = "partialGetOnBook";
    final Path filePath = Files.createTempFile("ds3", "lesmis-copies.txt");
    LOG.info("TempFile for partial get of book: " + filePath.toAbsolutePath().toString());

    try {

        HELPERS.ensureBucketExists(bucketName, envDataPolicyId);

        final List<Ds3Object> putObjects = Lists.newArrayList(new Ds3Object("lesmis-copies.txt", 13290604));

        final Ds3ClientHelpers.Job putJob = HELPERS.startWriteJob(bucketName, putObjects, WriteJobOptions
                .create().withMaxUploadSize(PutBulkJobSpectraS3Request.MIN_UPLOAD_SIZE_IN_BYTES));

        putJob.transfer(new ResourceObjectPutter("largeFiles/"));

        final List<Ds3Object> getObjects = Lists.newArrayList();
        getObjects.add(new PartialDs3Object("lesmis-copies.txt", Range.byLength(1048476, 200)));

        final Ds3ClientHelpers.Job getJob = HELPERS.startReadJob(bucketName, getObjects);

        getJob.transfer(new Ds3ClientHelpers.ObjectChannelBuilder() {
            @Override
            public SeekableByteChannel buildChannel(final String key) throws IOException {
                return Files.newByteChannel(filePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
            }
        });

        final Path expectedResultPath = Paths.get(Smoke_Test.class.getResource("/largeFiles/output").toURI());

        assertThat(Files.size(filePath), is(200L));
        final String partialFile = new String(Files.readAllBytes(filePath), Charset.forName("UTF-8"));
        final String expectedResult = new String(Files.readAllBytes(expectedResultPath),
                Charset.forName("UTF-8"));
        assertThat(partialFile, is(expectedResult.substring(0, expectedResult.length() - 1))); // substring drops the trailing newline that the OS adds to the expected file
    } finally {
        deleteAllContents(client, bucketName);
        Files.delete(filePath);
    }
}

From source file:org.cryptomator.webdav.jackrabbit.resources.EncryptedDir.java

private void addMemberFile(DavResource resource, InputContext inputContext) throws DavException {
    final Path childPath = ResourcePathUtils.getPhysicalPath(resource);
    SeekableByteChannel channel = null;
    try {
        channel = Files.newByteChannel(childPath, StandardOpenOption.WRITE, StandardOpenOption.CREATE);
        cryptor.encryptFile(inputContext.getInputStream(), channel);
    } catch (SecurityException e) {
        throw new DavException(DavServletResponse.SC_FORBIDDEN, e);
    } catch (IOException e) {
        LOG.error("Failed to create file.", e);
        throw new IORuntimeException(e);
    } finally {
        IOUtils.closeQuietly(channel);
        IOUtils.closeQuietly(inputContext.getInputStream());
    }
}