Example usage for java.lang Thread interrupted

List of usage examples for java.lang Thread interrupted

Introduction

On this page you can find example usages of java.lang.Thread.interrupted().

Prototype

public static boolean interrupted() 

Document

Tests whether the current thread has been interrupted. The interrupted status of the thread is cleared by this method.
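
As a quick illustration of these semantics, here is a minimal, self-contained sketch (plain JDK, no external libraries) showing that Thread.interrupted() reports the calling thread's interrupt status and clears it, so a second call returns false unless the thread is interrupted again:

public class InterruptedDemo {
    public static void main(String[] args) {
        // Set the current thread's interrupt status.
        Thread.currentThread().interrupt();

        // First call observes the pending interrupt and clears the flag.
        System.out.println(Thread.interrupted()); // prints "true"

        // The flag was cleared, so a second call reports no interrupt.
        System.out.println(Thread.interrupted()); // prints "false"
    }
}

Because the status is cleared, code that polls Thread.interrupted() in a loop, as in the examples below, typically either throws InterruptedException or re-asserts the interrupt once it detects it.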

Usage

From source file:org.grouplens.lenskit.eval.traintest.TrainTestJob.java

@SuppressWarnings("PMD.AvoidCatchingThrowable")
private void runEvaluation() throws IOException, RecommenderBuildException {
    EventBus bus = task.getProject().getEventBus();
    bus.post(JobEvents.started(this));
    Closer closer = Closer.create();
    try {
        outputs = task.getOutputs().getPrefixed(algorithmInfo, dataSet);
        TableWriter userResults = outputs.getUserWriter();
        List<Object> outputRow = Lists.newArrayList();

        logger.info("Building {} on {}", algorithmInfo, dataSet);
        StopWatch buildTimer = new StopWatch();
        buildTimer.start();
        buildRecommender();
        buildTimer.stop();
        logger.info("Built {} in {}", algorithmInfo.getName(), buildTimer);

        logger.info("Measuring {} on {}", algorithmInfo.getName(), dataSet.getName());

        StopWatch testTimer = new StopWatch();
        testTimer.start();
        List<Object> userRow = Lists.newArrayList();

        List<MetricWithAccumulator<?>> accumulators = Lists.newArrayList();

        for (Metric<?> eval : outputs.getMetrics()) {
            accumulators.add(makeMetricAccumulator(eval));
        }

        LongSet testUsers = dataSet.getTestData().getUserDAO().getUserIds();
        final NumberFormat pctFormat = NumberFormat.getPercentInstance();
        pctFormat.setMaximumFractionDigits(2);
        pctFormat.setMinimumFractionDigits(2);
        final int nusers = testUsers.size();
        logger.info("Testing {} on {} ({} users)", algorithmInfo, dataSet, nusers);
        int ndone = 0;
        for (LongIterator iter = testUsers.iterator(); iter.hasNext();) {
            if (Thread.interrupted()) {
                throw new InterruptedException("eval job interrupted");
            }
            long uid = iter.nextLong();
            userRow.add(uid);
            userRow.add(null); // placeholder for the per-user time
            assert userRow.size() == 2;

            Stopwatch userTimer = Stopwatch.createStarted();
            TestUser test = getUserResults(uid);

            userRow.add(test.getTrainHistory().size());
            userRow.add(test.getTestHistory().size());

            for (MetricWithAccumulator<?> accum : accumulators) {
                List<Object> ures = accum.measureUser(test);
                if (ures != null) {
                    userRow.addAll(ures);
                }
            }
            userTimer.stop();
            userRow.set(1, userTimer.elapsed(TimeUnit.MILLISECONDS) * 0.001);
            if (userResults != null) {
                try {
                    userResults.writeRow(userRow);
                } catch (IOException e) {
                    throw new RuntimeException("error writing user row", e);
                }
            }
            userRow.clear();

            ndone += 1;
            if (ndone % 100 == 0) {
                testTimer.split();
                double time = testTimer.getSplitTime();
                double tpu = time / ndone;
                double tleft = (nusers - ndone) * tpu;
                logger.info("tested {} of {} users ({}), ETA {}", ndone, nusers,
                        pctFormat.format(((double) ndone) / nusers),
                        DurationFormatUtils.formatDurationHMS((long) tleft));
            }
        }
        testTimer.stop();
        logger.info("Tested {} in {}", algorithmInfo.getName(), testTimer);

        writeMetricValues(buildTimer, testTimer, outputRow, accumulators);
        bus.post(JobEvents.finished(this));
    } catch (Throwable th) {
        bus.post(JobEvents.failed(this, th));
        throw closer.rethrow(th, RecommenderBuildException.class);
    } finally {
        try {
            cleanup();
        } finally {
            outputs = null;
            closer.close();
        }
    }
}

From source file:org.commonjava.couch.change.CouchChangeListener.java

@Override
public void run() {
    final CouchDocChangeDeserializer docDeserializer = new CouchDocChangeDeserializer();

    all: while (!Thread.interrupted()) {
        HttpGet get;
        try {
            final String url = buildUrl(config.getDatabaseUrl(), metadata.getUrlParameters(), CHANGES_SERVICE);

            get = new HttpGet(url);
        } catch (final MalformedURLException e) {
            logger.error("Failed to construct changes URL for db: %s. Reason: %s", e, config.getDatabaseUrl(),
                    e.getMessage());
            break;
        }

        String encoding = null;
        try {
            // logger.info( "requesting changes..." );

            final HttpResponse response = http.executeHttpWithResponse(get, "Failed to open changes stream.");

            if (response.getEntity() == null) {
                logger.error("Changes stream did not return a response body.");
            } else {
                final Header encodingHeader = response.getEntity().getContentEncoding();
                if (encodingHeader == null) {
                    encoding = "UTF-8";
                } else {
                    encoding = encodingHeader.getValue();
                }

                final InputStream stream = response.getEntity().getContent();

                running = true;
                synchronized (internalLock) {
                    internalLock.notifyAll();
                }

                final CouchDocChangeList changes = serializer.fromJson(stream, encoding,
                        CouchDocChangeList.class, docDeserializer);

                for (final CouchDocChange change : changes) {
                    logger.info("Processing change: %s", change.getId());

                    if (!change.getId().equals(CHANGE_LISTENER_DOCID)) {
                        metadata.setLastProcessedSequenceId(change.getSequence());
                        dispatcher.documentChanged(change);
                    }
                }
            }
        } catch (final CouchDBException e) {
            logger.error("Failed to read changes stream for db: %s. Reason: %s", e, config.getDatabaseUrl(),
                    e.getMessage());
            break;
        } catch (final UnsupportedEncodingException e) {
            logger.error("Invalid content encoding for changes response: %s. Reason: %s", e, encoding,
                    e.getMessage());
            break;
        } catch (final IOException e) {
            logger.error("Error reading changes response content. Reason: %s", e, e.getMessage());
            break;
        } finally {
            http.cleanup(get);
        }

        try {
            Thread.sleep(2000);
        } catch (final InterruptedException e) {
            break all;
        }
    }

    synchronized (internalLock) {
        internalLock.notifyAll();
    }
}

From source file:org.apache.solr.handler.TestHdfsBackupRestoreCore.java

@BeforeClass
public static void setupClass() throws Exception {
    dfsCluster = HdfsTestUtil.setupClass(createTempDir().toFile().getAbsolutePath());
    hdfsUri = HdfsTestUtil.getURI(dfsCluster);
    try {
        URI uri = new URI(hdfsUri);
        Configuration conf = HdfsTestUtil.getClientConfiguration(dfsCluster);
        conf.setBoolean("fs.hdfs.impl.disable.cache", true);
        fs = FileSystem.get(uri, conf);

        if (fs instanceof DistributedFileSystem) {
            // Make sure dfs is not in safe mode
            while (((DistributedFileSystem) fs).setSafeMode(SafeModeAction.SAFEMODE_GET, true)) {
                log.warn("The NameNode is in SafeMode - Solr will wait 5 seconds and try again.");
                try {
                    Thread.sleep(5000);
                } catch (InterruptedException e) {
                    Thread.interrupted();
                    // continue
                }
            }
        }

        fs.mkdirs(new org.apache.hadoop.fs.Path("/backup"));
    } catch (IOException | URISyntaxException e) {
        throw new RuntimeException(e);
    }

    System.setProperty("solr.hdfs.default.backup.path", "/backup");
    System.setProperty("solr.hdfs.home", hdfsUri + "/solr");
    useFactory("solr.StandardDirectoryFactory");

    configureCluster(1)// nodes
            .addConfig("conf1", TEST_PATH().resolve("configsets").resolve("cloud-minimal").resolve("conf"))
            .withSolrXml(HDFS_REPO_SOLR_XML).configure();

    docsSeed = random().nextLong();
}

From source file:at.bitfire.davdroid.syncadapter.SyncManager.java

@TargetApi(21)
public void performSync() {
    int syncPhase = SYNC_PHASE_PREPARE;
    try {
        App.log.info("Preparing synchronization");
        prepare();

        if (Thread.interrupted())
            return;
        syncPhase = SYNC_PHASE_QUERY_CAPABILITIES;
        App.log.info("Querying capabilities");
        queryCapabilities();

        syncPhase = SYNC_PHASE_PROCESS_LOCALLY_DELETED;
        App.log.info("Processing locally deleted entries");
        processLocallyDeleted();

        if (Thread.interrupted())
            return;
        syncPhase = SYNC_PHASE_PREPARE_DIRTY;
        App.log.info("Locally preparing dirty entries");
        prepareDirty();

        syncPhase = SYNC_PHASE_UPLOAD_DIRTY;
        App.log.info("Uploading dirty entries");
        uploadDirty();

        syncPhase = SYNC_PHASE_CHECK_SYNC_STATE;
        App.log.info("Checking sync state");
        if (checkSyncState()) {
            syncPhase = SYNC_PHASE_LIST_LOCAL;
            App.log.info("Listing local entries");
            listLocal();

            if (Thread.interrupted())
                return;
            syncPhase = SYNC_PHASE_LIST_REMOTE;
            App.log.info("Listing remote entries");
            listRemote();

            if (Thread.interrupted())
                return;
            syncPhase = SYNC_PHASE_COMPARE_LOCAL_REMOTE;
            App.log.info("Comparing local/remote entries");
            compareLocalRemote();

            syncPhase = SYNC_PHASE_DOWNLOAD_REMOTE;
            App.log.info("Downloading remote entries");
            downloadRemote();

            syncPhase = SYNC_PHASE_POST_PROCESSING;
            App.log.info("Post-processing");
            postProcess();

            syncPhase = SYNC_PHASE_SAVE_SYNC_STATE;
            App.log.info("Saving sync state");
            saveSyncState();
        } else
            App.log.info("Remote collection didn't change, skipping remote sync");

    } catch (IOException | ServiceUnavailableException e) {
        App.log.log(Level.WARNING, "I/O exception during sync, trying again later", e);
        syncResult.stats.numIoExceptions++;

        if (e instanceof ServiceUnavailableException) {
            Date retryAfter = ((ServiceUnavailableException) e).retryAfter;
            if (retryAfter != null) {
                // how many seconds to wait? getTime() returns ms, so divide by 1000
                syncResult.delayUntil = (retryAfter.getTime() - new Date().getTime()) / 1000;
            }
        }

    } catch (Exception | OutOfMemoryError e) {
        final int messageString;

        if (e instanceof UnauthorizedException) {
            App.log.log(Level.SEVERE, "Not authorized anymore", e);
            messageString = R.string.sync_error_unauthorized;
            syncResult.stats.numAuthExceptions++;
        } else if (e instanceof HttpException || e instanceof DavException) {
            App.log.log(Level.SEVERE, "HTTP/DAV Exception during sync", e);
            messageString = R.string.sync_error_http_dav;
            syncResult.stats.numParseExceptions++;
        } else if (e instanceof CalendarStorageException || e instanceof ContactsStorageException) {
            App.log.log(Level.SEVERE, "Couldn't access local storage", e);
            messageString = R.string.sync_error_local_storage;
            syncResult.databaseError = true;
        } else {
            App.log.log(Level.SEVERE, "Unknown sync error", e);
            messageString = R.string.sync_error;
            syncResult.stats.numParseExceptions++;
        }

        final Intent detailsIntent;
        if (e instanceof UnauthorizedException) {
            detailsIntent = new Intent(context, AccountSettingsActivity.class);
            detailsIntent.putExtra(AccountSettingsActivity.EXTRA_ACCOUNT, account);
        } else {
            detailsIntent = new Intent(context, DebugInfoActivity.class);
            detailsIntent.putExtra(DebugInfoActivity.KEY_THROWABLE, e);
            detailsIntent.putExtra(DebugInfoActivity.KEY_ACCOUNT, account);
            detailsIntent.putExtra(DebugInfoActivity.KEY_AUTHORITY, authority);
            detailsIntent.putExtra(DebugInfoActivity.KEY_PHASE, syncPhase);
        }

        // to make the PendingIntent unique
        detailsIntent.setData(Uri.parse("uri://" + getClass().getName() + "/" + uniqueCollectionId));

        NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
        builder.setSmallIcon(R.drawable.ic_error_light).setLargeIcon(App.getLauncherBitmap(context))
                .setContentTitle(getSyncErrorTitle())
                .setContentIntent(
                        PendingIntent.getActivity(context, 0, detailsIntent, PendingIntent.FLAG_CANCEL_CURRENT))
                .setCategory(NotificationCompat.CATEGORY_ERROR);

        try {
            String[] phases = context.getResources().getStringArray(R.array.sync_error_phases);
            String message = context.getString(messageString, phases[syncPhase]);
            builder.setContentText(message);
        } catch (IndexOutOfBoundsException ex) {
            // should never happen
        }

        notificationManager.notify(uniqueCollectionId, notificationId(), builder.build());
    }
}

From source file:org.marketcetera.util.except.ExceptUtilsTest.java

@Test
public void interruptionNestedThrow() {
    CloneNotSupportedException nested = new CloneNotSupportedException();
    Thread.currentThread().interrupt();
    try {
        ExceptUtils.checkInterruption(nested);
        fail();
    } catch (InterruptedException ex) {
        assertTrue(Thread.interrupted());
        assertEquals("Thread execution was interrupted", ex.getMessage());
        assertEquals(nested, ex.getCause());
    }
}

From source file:com.brienwheeler.lib.test.stepper.SteppableThread.java

private void waitForNextStepInternal() {
    boolean interrupted = Thread.interrupted(); // test and clear

    try {
        boolean done = false;
        while (!done) {
            try {
                if (firstWait) {
                    firstWait = false;
                } else {
                    verbose("signalling done");
                    if (stepDone.await() == 0)
                        stepDone.reset();
                }
                done = true;
            } catch (InterruptedException e) {
                verbose("interrupted");
                interrupted = true;
                stepDone.reset();
                signalInterruptProcessed();
            } catch (BrokenBarrierException e) {
                throw new RuntimeException(e);
            }
        }

        done = false;
        while (!done) {
            try {
                verbose("waiting for release");
                if (stepStart.await() == 0)
                    stepStart.reset();
                done = true;
            } catch (InterruptedException e) {
                verbose("interrupted");
                interrupted = true;
                stepStart.reset();
                signalInterruptProcessed();
            } catch (BrokenBarrierException e) {
                throw new RuntimeException(e);
            }
        }
    } finally {
        if (interrupted)
            interrupt();
    }
}

From source file:eu.stratosphere.pact.test.cancelling.CancellingTestBase.java

public void runAndCancelJob(Plan plan, int msecsTillCanceling, int maxTimeTillCanceled) throws Exception {
    try {
        // submit job
        final JobGraph jobGraph = getJobGraph(plan);

        final long startingTime = System.currentTimeMillis();
        long cancelTime = -1L;
        final JobClient client = cluster.getJobClient(jobGraph, null);
        final JobSubmissionResult submissionResult = client.submitJob();
        if (submissionResult.getReturnCode() != AbstractJobResult.ReturnCode.SUCCESS) {
            throw new IllegalStateException(submissionResult.getDescription());
        }

        final int interval = client.getRecommendedPollingInterval();
        final long sleep = interval * 1000L;

        Thread.sleep(sleep / 2);

        long lastProcessedEventSequenceNumber = -1L;

        while (true) {

            if (Thread.interrupted()) {
                throw new IllegalStateException("Job client has been interrupted");
            }

            final long now = System.currentTimeMillis();

            if (cancelTime < 0L) {

                // Cancel job
                if (startingTime + msecsTillCanceling < now) {

                    LOG.info("Issuing cancel request");

                    final JobCancelResult jcr = client.cancelJob();

                    if (jcr == null) {
                        throw new IllegalStateException("Return value of cancelJob is null!");
                    }

                    if (jcr.getReturnCode() != AbstractJobResult.ReturnCode.SUCCESS) {
                        throw new IllegalStateException(jcr.getDescription());
                    }

                    // Save when the cancel request has been issued
                    cancelTime = now;
                }
            } else {

                // Job has already been canceled
                if (cancelTime + maxTimeTillCanceled < now) {
                    throw new IllegalStateException("Cancelling of job took " + (now - cancelTime)
                            + " milliseconds, only " + maxTimeTillCanceled + " milliseconds are allowed");
                }
            }

            final JobProgressResult jobProgressResult = client.getJobProgress();

            if (jobProgressResult == null) {
                throw new IllegalStateException("Returned job progress is unexpectedly null!");
            }

            if (jobProgressResult.getReturnCode() == AbstractJobResult.ReturnCode.ERROR) {
                throw new IllegalStateException(
                        "Could not retrieve job progress: " + jobProgressResult.getDescription());
            }

            boolean exitLoop = false;

            final Iterator<AbstractEvent> it = jobProgressResult.getEvents();
            while (it.hasNext()) {

                final AbstractEvent event = it.next();

                // Did we already process that event?
                if (lastProcessedEventSequenceNumber >= event.getSequenceNumber()) {
                    continue;
                }

                lastProcessedEventSequenceNumber = event.getSequenceNumber();

                // Check if we can exit the loop
                if (event instanceof JobEvent) {
                    final JobEvent jobEvent = (JobEvent) event;
                    final JobStatus jobStatus = jobEvent.getCurrentJobStatus();

                    switch (jobStatus) {
                    case FINISHED:
                        throw new IllegalStateException("Job finished successfully");
                    case FAILED:
                        throw new IllegalStateException("Job failed");
                    case CANCELED:
                        exitLoop = true;
                        break;
                    case SCHEDULED: // okay
                    case RUNNING:
                        break;
                    default:
                        throw new Exception("Bug: Unrecognized Job Status.");
                    }
                }

                if (exitLoop) {
                    break;
                }
            }

            if (exitLoop) {
                break;
            }

            Thread.sleep(sleep);
        }

    } catch (Exception e) {
        LOG.error(e);
        fail(StringUtils.stringifyException(e));
        return;
    }
}

From source file:eu.stratosphere.test.cancelling.CancellingTestBase.java

public void runAndCancelJob(Plan plan, int msecsTillCanceling, int maxTimeTillCanceled) throws Exception {
    try {
        // submit job
        final JobGraph jobGraph = getJobGraph(plan);

        final long startingTime = System.currentTimeMillis();
        long cancelTime = -1L;
        final JobClient client = this.executor.getJobClient(jobGraph);
        final JobSubmissionResult submissionResult = client.submitJob();
        if (submissionResult.getReturnCode() != AbstractJobResult.ReturnCode.SUCCESS) {
            throw new IllegalStateException(submissionResult.getDescription());
        }

        final int interval = client.getRecommendedPollingInterval();
        final long sleep = interval * 1000L;

        Thread.sleep(sleep / 2);

        long lastProcessedEventSequenceNumber = -1L;

        while (true) {

            if (Thread.interrupted()) {
                throw new IllegalStateException("Job client has been interrupted");
            }

            final long now = System.currentTimeMillis();

            if (cancelTime < 0L) {

                // Cancel job
                if (startingTime + msecsTillCanceling < now) {

                    LOG.info("Issuing cancel request");

                    final JobCancelResult jcr = client.cancelJob();

                    if (jcr == null) {
                        throw new IllegalStateException("Return value of cancelJob is null!");
                    }

                    if (jcr.getReturnCode() != AbstractJobResult.ReturnCode.SUCCESS) {
                        throw new IllegalStateException(jcr.getDescription());
                    }

                    // Save when the cancel request has been issued
                    cancelTime = now;
                }
            } else {

                // Job has already been canceled
                if (cancelTime + maxTimeTillCanceled < now) {
                    throw new IllegalStateException("Cancelling of job took " + (now - cancelTime)
                            + " milliseconds, only " + maxTimeTillCanceled + " milliseconds are allowed");
                }
            }

            final JobProgressResult jobProgressResult = client.getJobProgress();

            if (jobProgressResult == null) {
                throw new IllegalStateException("Returned job progress is unexpectedly null!");
            }

            if (jobProgressResult.getReturnCode() == AbstractJobResult.ReturnCode.ERROR) {
                throw new IllegalStateException(
                        "Could not retrieve job progress: " + jobProgressResult.getDescription());
            }

            boolean exitLoop = false;

            final Iterator<AbstractEvent> it = jobProgressResult.getEvents();
            while (it.hasNext()) {

                final AbstractEvent event = it.next();

                // Did we already process that event?
                if (lastProcessedEventSequenceNumber >= event.getSequenceNumber()) {
                    continue;
                }

                lastProcessedEventSequenceNumber = event.getSequenceNumber();

                // Check if we can exit the loop
                if (event instanceof JobEvent) {
                    final JobEvent jobEvent = (JobEvent) event;
                    final JobStatus jobStatus = jobEvent.getCurrentJobStatus();

                    switch (jobStatus) {
                    case FINISHED:
                        throw new IllegalStateException("Job finished successfully");
                    case FAILED:
                        throw new IllegalStateException("Job failed");
                    case CANCELED:
                        exitLoop = true;
                        break;
                    case SCHEDULED: // okay
                    case RUNNING:
                        break;
                    default:
                        throw new Exception("Bug: Unrecognized Job Status.");
                    }
                }

                if (exitLoop) {
                    break;
                }
            }

            if (exitLoop) {
                break;
            }

            Thread.sleep(sleep);
        }

    } catch (Exception e) {
        LOG.error(e);
        fail(StringUtils.stringifyException(e));
        return;
    }
}

From source file:com.splout.db.dnode.Fetcher.java

private File hdfsFetch(Path fromPath, Reporter reporter) throws IOException, InterruptedException {
    UUID uniqueId = UUID.randomUUID();
    File toFile = new File(tempDir, uniqueId.toString() + "/" + fromPath.getName());
    File toDir = new File(toFile.getParent());
    if (toDir.exists()) {
        FileUtils.deleteDirectory(toDir);
    }
    toDir.mkdirs();
    Path toPath = new Path(toFile.getCanonicalPath());

    FileSystem fS = fromPath.getFileSystem(hadoopConf);
    FileSystem tofS = FileSystem.getLocal(hadoopConf);

    Throttler throttler = new Throttler((double) bytesPerSecThrottle);
    try {
        for (FileStatus fStatus : fS.globStatus(fromPath)) {
            log.info("Copying " + fStatus.getPath() + " to " + toPath);
            long bytesSoFar = 0;

            FSDataInputStream iS = fS.open(fStatus.getPath());
            FSDataOutputStream oS = tofS.create(toPath);

            byte[] buffer = new byte[downloadBufferSize];

            int nRead;
            while ((nRead = iS.read(buffer, 0, buffer.length)) != -1) {
                // Needed to being able to be interrupted at any moment.
                if (Thread.interrupted()) {
                    iS.close();
                    oS.close();
                    cleanDirNoExceptions(toDir);
                    throw new InterruptedException();
                }
                bytesSoFar += nRead;
                oS.write(buffer, 0, nRead);
                throttler.incrementAndThrottle(nRead);
                if (bytesSoFar >= bytesToReportProgress) {
                    reporter.progress(bytesSoFar);
                    bytesSoFar = 0L;
                }
            }

            if (reporter != null) {
                reporter.progress(bytesSoFar);
            }

            oS.close();
            iS.close();
        }

        return toDir;
    } catch (ClosedByInterruptException e) {
        // This can be thrown by the method read.
        cleanDirNoExceptions(toDir);
        throw new InterruptedIOException();
    }
}

From source file:com.granita.contacticloudsync.syncadapter.TasksSyncManager.java

@Override
protected void downloadRemote() throws IOException, HttpException, DavException, CalendarStorageException {
    log.info("Downloading " + toDownload.size() + " tasks (" + MAX_MULTIGET + " at once)");

    // download new/updated iCalendars from server
    for (DavResource[] bunch : ArrayUtils.partition(toDownload.toArray(new DavResource[toDownload.size()]),
            MAX_MULTIGET)) {
        if (Thread.interrupted())
            return;

        log.info("Downloading " + StringUtils.join(bunch, ", "));

        if (bunch.length == 1) {
            // only one task, use GET
            DavResource remote = bunch[0];

            ResponseBody body = remote.get("text/calendar");
            String eTag = ((GetETag) remote.properties.get(GetETag.NAME)).eTag;

            Charset charset = Charsets.UTF_8;
            MediaType contentType = body.contentType();
            if (contentType != null)
                charset = contentType.charset(Charsets.UTF_8);

            @Cleanup
            InputStream stream = body.byteStream();
            processVTodo(remote.fileName(), eTag, stream, charset);

        } else {
            // multiple tasks, use multi-get
            List<HttpUrl> urls = new LinkedList<>();
            for (DavResource remote : bunch)
                urls.add(remote.location);
            davCalendar().multiget(urls.toArray(new HttpUrl[urls.size()]));

            // process multiget results
            for (DavResource remote : davCollection.members) {
                String eTag;
                GetETag getETag = (GetETag) remote.properties.get(GetETag.NAME);
                if (getETag != null)
                    eTag = getETag.eTag;
                else
                    throw new DavException("Received multi-get response without ETag");

                Charset charset = Charsets.UTF_8;
                GetContentType getContentType = (GetContentType) remote.properties.get(GetContentType.NAME);
                if (getContentType != null && getContentType.type != null) {
                    MediaType type = MediaType.parse(getContentType.type);
                    if (type != null)
                        charset = type.charset(Charsets.UTF_8);
                }

                CalendarData calendarData = (CalendarData) remote.properties.get(CalendarData.NAME);
                if (calendarData == null || calendarData.iCalendar == null)
                    throw new DavException("Received multi-get response without address data");

                @Cleanup
                InputStream stream = new ByteArrayInputStream(calendarData.iCalendar.getBytes());
                processVTodo(remote.fileName(), eTag, stream, charset);
            }
        }
    }
}