Example usage for java.io InputStream reset

List of usage examples for java.io InputStream reset

Introduction

This page collects example usages of java.io.InputStream.reset() taken from real source files.

Prototype

public synchronized void reset() throws IOException 

Document

Repositions this stream to the position at the time the mark method was last called on this input stream.
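
reset() throws an IOException if the stream does not support mark/reset or if the mark has been invalidated. Before the project examples below, here is a minimal sketch of the mark/reset contract (the file name is hypothetical); a BufferedInputStream is used because a raw FileInputStream does not support marking:

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

public class ResetExample {
    public static void main(String[] args) throws IOException {
        try (InputStream in = new BufferedInputStream(new FileInputStream("data.bin"))) {
            in.mark(16); // reset() stays valid for up to 16 bytes read after this call
            int first = in.read(); // peek at the first byte
            in.reset(); // rewind to the marked position
            int again = in.read(); // reads the same byte again
            System.out.println(first == again); // prints true
        }
    }
}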

Usage

From source file:au.org.theark.study.util.DataUploader.java

public StringBuffer uploadAndReportSubjectAttachmentDataFile(InputStream inputStream, long size,
        String fileFormat, char delimChar, String user_id) throws FileFormatException, ArkSystemException {
    uploadReport = new StringBuffer();
    long rowCount = 0;
    long insertFieldsCount = 0;
    long updateFieldsCount = 0;

    List<SubjectFile> subjectFiles = new ArrayList<SubjectFile>();

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");
    delimiterCharacter = delimChar;

    if (fileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(inputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            inputStream = xlsToCsv.convertXlsToCsv(w);
            inputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    try {
        inputStreamReader = new InputStreamReader(inputStream);
        csvReader = new CsvReader(inputStreamReader, delimiterCharacter);
        csvReader.readHeaders();
        String[] stringLineArray;

        int subjectUidIndex = csvReader.getIndex("SUBJECTUID");
        int filePathIndex = csvReader.getIndex("FILE_NAME_WITH_FULL_PATH");
        int studyComponentIndex = csvReader.getIndex("STUDY_COMPONENT");
        int commentIndex = csvReader.getIndex("COMMENT");

        List<StudyComp> studyCompList = iArkCommonService.getStudyComponentByStudy(study);

        while (csvReader.readRecord()) {
            ++rowCount;
            stringLineArray = csvReader.getValues();

            SubjectFile subjectFile = new SubjectFile();

            subjectFile.setUserId(user_id);

            String subjectUID = stringLineArray[subjectUidIndex];
            String studyCompName = stringLineArray[studyComponentIndex];
            LinkSubjectStudy subject = iArkCommonService.getSubjectByUID(subjectUID, study);
            subjectFile.setLinkSubjectStudy(subject);
            for (StudyComp studyComp : studyCompList) {
                if (studyComp.getName().equals(studyCompName)) {
                    subjectFile.setStudyComp(studyComp);
                    break;
                }
            }
            subjectFile.setComments(stringLineArray[commentIndex]);

            // File processing

            String sourcePath = stringLineArray[filePathIndex];

            File file = new File(sourcePath);

            subjectFile.setChecksum(iArkCommonService.generateArkFileChecksum(file, "MD5"));
            String fileName = file.getName();
            subjectFile.setFilename(fileName);
            String fileId = iArkCommonService.generateArkFileId(fileName);
            subjectFile.setFileId(fileId);

            String directoryName = iArkCommonService.getArkFileDirName(study.getId(), subjectUID,
                    au.org.theark.study.web.Constants.ARK_SUBJECT_ATTACHEMENT_DIR);
            // TODO: check that the directory was created successfully
            iArkCommonService.createArkFileAttachmentDirectoy(directoryName);
            String destinationPath = directoryName + File.separator + fileId;
            iArkCommonService.copyArkLargeFileAttachments(sourcePath, destinationPath);

            subjectFiles.add(subjectFile);
        }

    } catch (Exception e) {
        e.printStackTrace();
        throw new ArkSystemException(e.getMessage());
    } finally {
        uploadReport.append("Total file size: ");
        uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: isr.close()", ex);
            }
        }
    }

    uploadReport.append("Process ");
    uploadReport.append(rowCount);
    uploadReport.append(" rows of data");
    uploadReport.append("\n");

    uploadReport.append(insertFieldsCount);
    uploadReport.append(" fields were inserted.");
    uploadReport.append("\n");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" fields were updated.");
    uploadReport.append("\n");

    try {
        iStudyService.processSubjectAttachmentBatch(subjectFiles);
    } catch (Exception e) {
        e.printStackTrace();
        throw new ArkSystemException(e.getMessage());
    }

    return uploadReport;
}
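
In the example above, inputStream.reset() is called on the stream returned by XLStoCSV.convertXlsToCsv(w) so that the CsvReader created afterwards starts reading from the first byte. That call can only succeed if the converter returns a stream that supports mark/reset. The XLStoCSV implementation is not part of this listing, so the following is only a hedged sketch of a converter shape that satisfies the contract by buffering the CSV bytes in memory:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;

// Hypothetical sketch only; the real XLStoCSV class used in the sources is not reproduced here.
public class CsvBufferSketch {
    public InputStream toResettableCsvStream(String[][] rows, char delimiter) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        for (String[] row : rows) {
            out.write(String.join(String.valueOf(delimiter), row).getBytes(StandardCharsets.UTF_8));
            out.write('\n');
        }
        // ByteArrayInputStream supports mark/reset, so the caller's inputStream.reset() is safe
        // even though mark() was never called: reset() rewinds to the initial position.
        return new ByteArrayInputStream(out.toByteArray());
    }
}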

From source file:au.org.theark.study.util.DataUploader.java

/**
 * Upload and report Family Custom field Data.
 *
 * @param inputStream
 * @param size
 * @param fileFormat
 * @param delimChar
 * @param listOfUIDsToUpdate
 * @return
 * @throws FileFormatException
 * @throws ArkSystemException
 */
public StringBuffer uploadAndReportFamilyCustomDataFile(InputStream inputStream, long size, String fileFormat,
        char delimChar, List<String> listOfUIDsToUpdate) throws FileFormatException, ArkSystemException {
    List<FamilyCustomFieldData> customFieldsToUpdate = new ArrayList<FamilyCustomFieldData>();
    List<FamilyCustomFieldData> customFieldsToInsert = new ArrayList<FamilyCustomFieldData>();
    CsvReader csvReader = null;
    delimiterCharacter = delimChar;
    uploadReport = new StringBuffer();
    DecimalFormat decimalFormat = new DecimalFormat("0.00");
    if (fileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(inputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            inputStream = xlsToCsv.convertXlsToCsv(w);
            inputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    int familyCount = 0;
    long updateFieldsCount = 0L;
    long insertFieldsCount = 0L;
    long emptyDataCount = 0L;
    try {
        String[] stringLineArray;

        if (size <= 0) {
            uploadReport.append("ERROR:  The input size was not greater than 0. Actual length reported: ");
            uploadReport.append(size);
            uploadReport.append("\n");
            throw new FileFormatException(
                    "The input size was not greater than 0. Actual length reported: " + size);
        }
        csvReader = new CsvReader(new InputStreamReader(inputStream), delimChar);
        csvReader.readHeaders();
        List<String> fieldNameCollection = Arrays.asList(csvReader.getHeaders());
        ArkFunction subjectCustomFieldArkFunction = iArkCommonService
                .getArkFunctionByName(Constants.FUNCTION_KEY_VALUE_SUBJECT_CUSTOM_FIELD);
        CustomFieldType customFieldType = iArkCommonService.getCustomFieldTypeByName(Constants.FAMILY);
        List<CustomFieldDisplay> cfdsThatWeNeed = iArkCommonService.getCustomFieldDisplaysInWithCustomFieldType(
                fieldNameCollection, study, subjectCustomFieldArkFunction, customFieldType);
        List<FamilyCustomFieldData> dataThatWeHave = iArkCommonService.getFamilyCustomFieldDataFor(study,
                cfdsThatWeNeed, listOfUIDsToUpdate);
        // read one line which contains potentially many custom fields
        while (csvReader.readRecord()) {
            log.info("reading record " + familyCount);
            stringLineArray = csvReader.getValues();
            String familyUid = stringLineArray[0];
            // Additional validation: familyUid should be a unique value within a study.

            CustomField customField = null;
            // Iterate through custom fields and pick one family custom field at a time.
            for (CustomFieldDisplay cfd : cfdsThatWeNeed) {
                customField = cfd.getCustomField();
                // log.info("got customfield from cfd");
                FamilyCustomFieldData dataInDB = getFamilyCustomFieldFromList(dataThatWeHave, familyUid, cfd);
                // log.info("got 'data in db' from cfd, subject and ALL data");
                String theDataAsString = csvReader.get(cfd.getCustomField().getName());
                // log.info("read data from file");

                if (theDataAsString != null && !theDataAsString.isEmpty()) {
                    if (dataInDB != null) {
                        dataInDB = (FamilyCustomFieldData) setValue(customField, cfd, dataInDB,
                                theDataAsString);
                        // log.info("have set value to entity");
                        customFieldsToUpdate.add(dataInDB);
                        // log.info("added entity to list");
                        updateFieldsCount++;
                    } else {
                        FamilyCustomFieldData dataToInsert = new FamilyCustomFieldData();
                        dataToInsert.setCustomFieldDisplay(cfd);
                        dataToInsert.setFamilyUid(familyUid);
                        dataToInsert.setStudy(study);
                        setValue(customField, cfd, dataToInsert, theDataAsString);
                        customFieldsToInsert.add(dataToInsert);
                        insertFieldsCount++;
                    }
                } else {
                    emptyDataCount++;
                }
            }

            familyCount++;
        }
        log.info("finished message for " + familyCount + "         updates= " + updateFieldsCount
                + " or \ncustomFieldsToupdate.size=" + customFieldsToUpdate.size() + "\n     inserts = "
                + insertFieldsCount + "  or  \ncustomFieldsToInsert.size = " + customFieldsToInsert.size()
                + "   amount of empty scells =" + emptyDataCount);
    } catch (IOException ioe) {
        uploadReport.append("SYSTEM ERROR:   Unexpected I/O exception whilst reading the family data file\n");
        log.error("processMatrixSubjectFile IOException stacktrace:", ioe);
        throw new ArkSystemException("Unexpected I/O exception whilst reading the family data file");
    } catch (Exception ex) {
        uploadReport.append("SYSTEM ERROR:   Unexpected exception whilst reading the family data file\n");
        log.error("processMatrixSubjectFile Exception stacktrace:", ex);
        throw new ArkSystemException("Unexpected exception occurred when trying to process family data file");
    } finally {
        uploadReport.append("Total file size: ");
        uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        /*if (inputStreamReader != null) {
           try {
              inputStreamReader.close();
           }
           catch (Exception ex) {
              log.error("Cleanup operation failed: isr.close()", ex);
           }
        }*/

    }

    uploadReport.append("Inserted ");
    uploadReport.append(familyCount);
    uploadReport.append(" rows of data");
    uploadReport.append("\n");

    uploadReport.append(insertFieldsCount);
    uploadReport.append(" fields were inserted.");
    uploadReport.append("\n");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" fields were updated.");
    uploadReport.append("\n");

    // TODO: better exception handling
    iStudyService.processFieldsBatch(customFieldsToUpdate, study, customFieldsToInsert);
    return uploadReport;
}

From source file:au.org.theark.study.util.DataUploader.java

/**
 * Upload and report Subject Custom field Data.
 *
 * @param inputStream
 * @param size
 * @param fileFormat
 * @param delimChar
 * @param listOfUIDsToUpdate
 * @return
 * @throws FileFormatException
 * @throws ArkSystemException
 * Used in step 4.
 */
public StringBuffer uploadAndReportSubjectCustomDataFile(InputStream inputStream, long size, String fileFormat,
        char delimChar, List<String> listOfUIDsToUpdate, UploadVO uploadVO)
        throws FileFormatException, ArkSystemException {
    List<SubjectCustomFieldData> customFieldsToUpdate = new ArrayList<SubjectCustomFieldData>();
    List<SubjectCustomFieldData> customFieldsToInsert = new ArrayList<SubjectCustomFieldData>();
    delimiterCharacter = delimChar;
    uploadReport = new StringBuffer();
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    if (fileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(inputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            inputStream = xlsToCsv.convertXlsToCsv(w);
            inputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    int subjectCount = 1;
    long updateFieldsCount = 0L;
    long insertFieldsCount = 0L;
    long emptyDataCount = 0L;
    int percentage = 0;
    int totalUploadSize = 0;
    try {

        String[] stringLineArray;
        List<LinkSubjectStudy> allSubjectWhichWillBeUpdated = null;
        totalUploadSize = listOfUIDsToUpdate.size();
        if (totalUploadSize > 0) {
            allSubjectWhichWillBeUpdated = iArkCommonService.getUniqueSubjectsWithTheseUIDs(study,
                    listOfUIDsToUpdate);
        } else {
            allSubjectWhichWillBeUpdated = new ArrayList<LinkSubjectStudy>(0);
        }
        if (size <= 0) {
            uploadReport.append("ERROR:  The input size was not greater than 0. Actual length reported: ");
            uploadReport.append(size);
            uploadReport.append("\n");
            throw new FileFormatException(
                    "The input size was not greater than 0. Actual length reported: " + size);
        }
        csvReader = new CsvReader(new InputStreamReader(inputStream), delimChar);
        csvReader.readHeaders();
        String[] headers = csvReader.getHeaders();
        List<String> fieldNameCollection = Arrays.asList(headers);
        ArkFunction subjectCustomFieldArkFunction = iArkCommonService
                .getArkFunctionByName(Constants.FUNCTION_KEY_VALUE_SUBJECT_CUSTOM_FIELD);
        CustomFieldType customFieldType = iArkCommonService.getCustomFieldTypeByName(Constants.SUBJECT);
        List<CustomFieldDisplay> cfdsThatWeNeed = iArkCommonService.getCustomFieldDisplaysInWithCustomFieldType(
                fieldNameCollection, study, subjectCustomFieldArkFunction, customFieldType);
        List<SubjectCustomFieldData> dataThatWeHave = iArkCommonService
                .getSubjectCustomFieldDataFor(cfdsThatWeNeed, allSubjectWhichWillBeUpdated);
        // read one line which contains potentially many custom fields
        while (csvReader.readRecord()) {
            log.info("reading record " + subjectCount);
            percentage = (int) Math.round(((double) (subjectCount) / (double) totalUploadSize) * 100.0);
            uploadVO.setProgress(percentage);
            stringLineArray = csvReader.getValues();
            String subjectUID = stringLineArray[0];
            LinkSubjectStudy subject = getSubjectByUIDFromExistList(allSubjectWhichWillBeUpdated, subjectUID);
            // log.info("get subject from list");
            CustomField customField = null;
            for (CustomFieldDisplay cfd : cfdsThatWeNeed) {
                customField = cfd.getCustomField();
                // log.info("got customfield from cfd");
                SubjectCustomFieldData dataInDB = getSubjectCustomFieldFromList(dataThatWeHave, subjectUID,
                        cfd);
                // log.info("got 'data in db' from cfd, subject and ALL data");
                String theDataAsString = csvReader.get(cfd.getCustomField().getName());
                // log.info("read data from file");

                if (theDataAsString != null && !theDataAsString.isEmpty()) {
                    if (dataInDB != null) {
                        dataInDB = (SubjectCustomFieldData) setValue(customField, cfd, dataInDB,
                                theDataAsString);
                        // log.info("have set value to entity");
                        customFieldsToUpdate.add(dataInDB);
                        // log.info("added entity to list");
                        updateFieldsCount++;
                    } else {
                        SubjectCustomFieldData dataToInsert = new SubjectCustomFieldData();
                        dataToInsert.setCustomFieldDisplay(cfd);
                        dataToInsert.setLinkSubjectStudy(subject);
                        setValue(customField, cfd, dataToInsert, theDataAsString);
                        customFieldsToInsert.add(dataToInsert);
                        insertFieldsCount++;
                    }
                } else {
                    emptyDataCount++;
                }
            }

            subjectCount++;
        }
        log.info("finished message for " + subjectCount + "         updates= " + updateFieldsCount
                + " or \ncustomFieldsToupdate.size=" + customFieldsToUpdate.size() + "\n     inserts = "
                + insertFieldsCount + "  or  \ncustomFieldsToInsert.size = " + customFieldsToInsert.size()
                + "   amount of empty scells =" + emptyDataCount);
    } catch (IOException ioe) {
        uploadReport.append("SYSTEM ERROR:   Unexpected I/O exception whilst reading the subject data file\n");
        log.error("processMatrixSubjectFile IOException stacktrace:", ioe);
        throw new ArkSystemException("Unexpected I/O exception whilst reading the subject data file");
    } catch (Exception ex) {
        uploadReport.append("SYSTEM ERROR:   Unexpected exception whilst reading the subject data file\n");
        log.error("processMatrixSubjectFile Exception stacktrace:", ex);
        throw new ArkSystemException("Unexpected exception occurred when trying to process subject data file");
    } finally {
        uploadReport.append("Total file size: ");
        uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        if (inputStream != null) {
            try {
                inputStream.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: isr.close()", ex);
            }
        }

    }

    uploadReport.append("Inserted ");
    uploadReport.append(subjectCount);
    uploadReport.append(" rows of data");
    uploadReport.append("\n");

    uploadReport.append(insertFieldsCount);
    uploadReport.append(" fields were inserted.");
    uploadReport.append("\n");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" fields were updated.");
    uploadReport.append("\n");

    // TODO: better exception handling
    iStudyService.processFieldsBatch(customFieldsToUpdate, study, customFieldsToInsert);
    return uploadReport;
}

From source file:org.artifactory.maven.PomTargetPathValidator.java

public void validate(InputStream in, boolean suppressPomConsistencyChecks) throws IOException {
    MavenXpp3Reader reader = new MavenXpp3Reader();
    try {
        model = reader.read(new InputStreamReader(in, MavenModelUtils.UTF8));

        String groupId = getGroupId(model);

        if (StringUtils.isNotBlank(groupId)) {
            //Do not verify if the pom's groupid does not exist
            String modelVersion = getModelVersion(model);
            if (StringUtils.isBlank(modelVersion)) {
                String msg = String.format(
                        "The POM version of '%s' does not exist. Please verify your POM content for correctness",
                        relPath);
                if (suppressPomConsistencyChecks) {
                    log.error("{} POM consistency checks are suppressed. Broken artifacts might have been "
                            + "stored in the repository - please resolve this manually.", msg);
                    return;
                } else {
                    throw new BadPomException(msg);
                }
            }

            //For snapshots with unique snapshot version, do not include the model version in the path
            boolean snapshot = moduleInfo.isIntegration();
            boolean versionSnapshot = MavenNaming.isNonUniqueSnapshotVersion(modelVersion);

            String pathPrefix = null;
            if (snapshot && !versionSnapshot) {
                pathPrefix = groupId.replace('.', '/') + "/" + model.getArtifactId() + "/";
            } else if (StringUtils.isNotBlank(modelVersion)) {
                pathPrefix = groupId.replace('.', '/') + "/" + model.getArtifactId() + "/" + modelVersion;
            }

            //Do not validate paths that contain property references
            if (pathPrefix != null && !pathPrefix.contains("${")
                    && !StringUtils.startsWithIgnoreCase(relPath, pathPrefix)) {
                String msg = String
                        .format("The target deployment path '%s' does not match the POM's expected path "
                                + "prefix '%s'. Please verify your POM content for correctness and make sure the source path "
                                + "is a valid Maven repository root path.", relPath, pathPrefix);
                if (suppressPomConsistencyChecks) {
                    log.warn("{} POM consistency checks are suppressed. Broken artifacts might have been "
                            + "stored in the repository - please resolve this manually.", msg);
                } else {
                    throw new BadPomException(msg);
                }
            }
        }
    } catch (XmlPullParserException e) {
        if (log.isDebugEnabled()) {
            try {
                in.reset();
                InputStreamReader isr = new InputStreamReader(in, MavenModelUtils.UTF8);
                String s = readString(isr);
                log.debug("Could not parse bad POM for '{}'. Bad POM content:\n{}\n", relPath, s);
            } catch (Exception ex) {
                log.trace("Could not extract bad POM content for '{}': {}.", relPath, e.getMessage());
            }
        }
        String message = "Failed to read POM for '" + relPath + "': " + e.getMessage() + ".";
        if (suppressPomConsistencyChecks) {
            log.error(message + " POM consistency checks are suppressed. Broken artifacts might have been "
                    + "stored in the repository - please resolve this manually.");
        } else {
            throw new BadPomException(message);
        }
    }
}

From source file:ee.sk.digidoc.factory.SAXDigiDocFactory.java

/**
 * Checks if this stream could be a bdoc input stream
 * @param is input stream, must support mark() and reset() operations!
 * @return true if bdoc
 */
private boolean isBdocFile(InputStream is) throws DigiDocException {
    try {
        if (is.markSupported())
            is.mark(10);
        byte[] tdata = new byte[10];
        int n = is.read(tdata);
        if (is.markSupported())
            is.reset();
        if (n >= 2 && tdata[0] == (byte) 'P' && tdata[1] == (byte) 'K')
            return true; // probably a zip file
        if (n >= 5 && tdata[0] == (byte) '<' && tdata[1] == (byte) '?' && tdata[2] == (byte) 'x'
                && tdata[3] == (byte) 'm' && tdata[4] == (byte) 'l')
            return false; // an xml file - probably ddoc format?
    } catch (Exception ex) {
        m_logger.error("Error determining file type: " + ex);
    }
    return false;
}
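
Note that isBdocFile() only marks and resets when markSupported() returns true; if the caller passes a stream without mark/reset support (for example a raw FileInputStream), the first ten bytes are consumed and lost to any later parsing. The factory's real call site is not shown in this listing, so the following is only a hedged usage sketch (hypothetical file path, assuming access to isBdocFile() inside the same class) that wraps the stream in a BufferedInputStream so the peeked bytes can be re-read:

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;

// Hypothetical helper alongside isBdocFile() in the factory class.
private boolean detectBdoc(String path) throws IOException, DigiDocException {
    try (InputStream is = new BufferedInputStream(new FileInputStream(path))) {
        // BufferedInputStream guarantees markSupported(), so the mark(10)/reset() inside
        // isBdocFile() leaves the stream positioned at byte 0 for the real parser.
        return isBdocFile(is);
    }
}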

From source file:org.opendatakit.aggregate.odktables.api.perf.AggregateSynchronizer.java

/**
 *
 * @param destFile
 * @param downloadUrl
 * @return the HTTP status code (HttpStatus.SC_OK on success)
 * @throws Exception
 */
private int downloadFile(File destFile, URI downloadUrl) throws Exception {

    // WiFi network connections can be renegotiated during a large form download
    // sequence. This will cause intermittent download failures. Silently retry
    // once after each failure. Only if there are two consecutive failures, do we abort.
    boolean success = false;
    int attemptCount = 0;
    while (!success && attemptCount++ <= 2) {

        Resource resource = buildFileDownloadResource(downloadUrl);
        if (destFile.exists()) {
            String md5Hash = ODKFileUtils.getMd5Hash(appName, destFile);
            resource.header(HttpHeaders.IF_NONE_MATCH, md5Hash);
        }

        ClientResponse response = null;
        try {
            response = resource.get();
            int statusCode = response.getStatusCode();

            if (statusCode != HttpStatus.SC_OK) {
                response.consumeContent();
                if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
                    // clear the cookies -- should not be necessary?
                    // ss: might just be a collect thing?
                }
                logger.warn("downloading " + downloadUrl.toString() + " returns " + statusCode);
                return statusCode;
            }

            if (!response.getHeaders().containsKey(ApiConstants.OPEN_DATA_KIT_VERSION_HEADER)) {
                response.consumeContent();
                logger.warn("downloading " + downloadUrl.toString() + " appears to have been redirected.");
                return 302;
            }

            File tmp = new File(destFile.getParentFile(), destFile.getName() + ".tmp");
            int totalLen = 0;
            InputStream is = null;
            BufferedOutputStream os = null;
            try {
                // open the InputStream of the (uncompressed) entity body...
                is = response.getEntity(InputStream.class);
                os = new BufferedOutputStream(new FileOutputStream(tmp));

                // write connection to temporary file
                byte buf[] = new byte[8192];
                int len;
                while ((len = is.read(buf, 0, buf.length)) >= 0) {
                    if (len != 0) {
                        totalLen += len;
                        os.write(buf, 0, len);
                    }
                }
                is.close();
                is = null;

                os.flush();
                os.close();
                os = null;

                success = tmp.renameTo(destFile);
            } catch (Exception e) {
                // most likely a socket timeout
                e.printStackTrace();
                logger.error("downloading " + downloadUrl.toString() + " failed after " + totalLen + " bytes: "
                        + e.toString());
                try {
                    // signal to the framework that this socket is hosed.
                    // with the various nested streams, this may not work...
                    is.reset();
                } catch (Exception ex) {
                    // ignore
                }
                throw e;
            } finally {
                if (os != null) {
                    try {
                        os.close();
                    } catch (Exception e) {
                        // no-op
                    }
                }
                if (is != null) {
                    try {
                        // ensure stream is consumed...
                        byte buf[] = new byte[8192];
                        while (is.read(buf) >= 0)
                            ;
                    } catch (Exception e) {
                        // no-op
                    }
                    try {
                        is.close();
                    } catch (Exception e) {
                        // no-op
                    }
                }
                if (tmp.exists()) {
                    tmp.delete();
                }
                response.consumeContent();
            }
        } catch (ClientWebException e) {
            e.printStackTrace();
            if (response != null) {
                response.consumeContent();
            }
            if (attemptCount != 1) {
                throw e;
            }
        }
    }
    return HttpStatus.SC_OK;
}

From source file:com.lion328.xenonlauncher.proxy.HttpDataHandler.java

@Override
public boolean process(Socket client, Socket server) throws Exception {
    InputStream clientIn = client.getInputStream();
    clientIn.mark(65536);

    try {
        DefaultBHttpServerConnection httpClient = new DefaultBHttpServerConnection(8192);
        httpClient.bind(client);
        httpClient.setSocketTimeout(timeout);

        DefaultBHttpClientConnection httpServer = new DefaultBHttpClientConnection(8192);
        httpServer.bind(server);

        HttpCoreContext context = HttpCoreContext.create();
        context.setAttribute("client.socket", client);
        context.setAttribute("server.socket", server);

        HttpEntityEnclosingRequest request;

        do {
            HttpRequest rawRequest = httpClient.receiveRequestHeader();

            if (rawRequest instanceof HttpEntityEnclosingRequest) {
                request = (HttpEntityEnclosingRequest) rawRequest;
            } else {
                request = new BasicHttpEntityEnclosingRequest(rawRequest.getRequestLine());
                request.setHeaders(rawRequest.getAllHeaders());
            }

            httpClient.receiveRequestEntity(request);

            HttpResponse response = new BasicHttpResponse(
                    new BasicStatusLine(HttpVersion.HTTP_1_1, HttpStatus.SC_OK, "OK"));

            boolean sent = false;

            for (Map.Entry<Integer, HttpRequestHandler> entry : handlers.entrySet()) {
                entry.getValue().handle(request, response, context);

                if (context.getAttribute("response.set") instanceof HttpResponse) {
                    response = (HttpResponse) context.getAttribute("response.set");
                }

                if (context.getAttribute("pipeline.end") == Boolean.TRUE) {
                    break;
                }

                if (context.getAttribute("response.need-original") == Boolean.TRUE && !sent) {
                    httpServer.sendRequestHeader(request);
                    httpServer.sendRequestEntity(request);
                    response = httpServer.receiveResponseHeader();
                    httpServer.receiveResponseEntity(response);

                    entry.getValue().handle(request, response, context);

                    context.removeAttribute("response.need-original");
                    context.setAttribute("request.sent", true);

                    sent = true;
                }
            }

            if (context.getAttribute("response.sent") != Boolean.TRUE) {
                httpClient.sendResponseHeader(response);

                if (response.getEntity() != null) {
                    httpClient.sendResponseEntity(response);
                }
            }
        } while (request.getFirstHeader("Connection").getValue().equals("keep-alive"));

        return true;
    } catch (ProtocolException e) {
        clientIn.reset();
        return false;
    } catch (ConnectionClosedException e) {
        return true;
    }
}
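
The handler above calls clientIn.mark(65536) before attempting to parse HTTP so that, on a ProtocolException, clientIn.reset() rewinds the socket stream and the unrecognized bytes remain available to other processing. The readlimit matters: once more than 65536 bytes have been read after mark(), reset() is no longer guaranteed to succeed. A small self-contained sketch (with deliberately tiny, hypothetical sizes) of that invalidation on a BufferedInputStream:

import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class MarkReadLimitDemo {
    public static void main(String[] args) throws IOException {
        InputStream in = new BufferedInputStream(new ByteArrayInputStream(new byte[32]), 4);
        in.mark(4); // reset() is only guaranteed while at most 4 bytes are read after this
        in.read(new byte[16]); // read well past the readlimit
        try {
            in.reset(); // typically throws: the mark was invalidated by reading past the limit
        } catch (IOException e) {
            System.out.println("mark invalidated: " + e.getMessage());
        }
    }
}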

From source file:org.opendatakit.sync.aggregate.AggregateSynchronizer.java

/**
 *
 * @param destFile
 * @param downloadUrl
 * @return the HTTP status code (HttpStatus.SC_OK on success)
 * @throws Exception
 */
private int downloadFile(File destFile, URI downloadUrl) throws Exception {

    // WiFi network connections can be renegotiated during a large form download
    // sequence. This will cause intermittent download failures. Silently retry
    // once after each failure. Only if there are two consecutive failures, do we abort.
    boolean success = false;
    int attemptCount = 0;
    while (!success && attemptCount++ <= 2) {

        Resource resource = buildFileDownloadResource(downloadUrl);
        if (destFile.exists()) {
            String md5Hash = ODKFileUtils.getMd5Hash(appName, destFile);
            resource.header(HttpHeaders.IF_NONE_MATCH, md5Hash);
        }

        ClientResponse response = null;
        try {
            response = resource.get();
            int statusCode = response.getStatusCode();

            if (statusCode != HttpStatus.SC_OK) {
                response.consumeContent();
                if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
                    // clear the cookies -- should not be necessary?
                    // ss: might just be a collect thing?
                }
                log.w(LOGTAG, "downloading " + downloadUrl.toString() + " returns " + statusCode);
                return statusCode;
            }

            if (!response.getHeaders().containsKey(ApiConstants.OPEN_DATA_KIT_VERSION_HEADER)) {
                response.consumeContent();
                log.w(LOGTAG, "downloading " + downloadUrl.toString() + " appears to have been redirected.");
                return 302;
            }

            File tmp = new File(destFile.getParentFile(), destFile.getName() + ".tmp");
            int totalLen = 0;
            InputStream is = null;
            BufferedOutputStream os = null;
            try {
                // open the InputStream of the (uncompressed) entity body...
                is = response.getEntity(InputStream.class);
                os = new BufferedOutputStream(new FileOutputStream(tmp));

                // write connection to temporary file
                byte buf[] = new byte[8192];
                int len;
                while ((len = is.read(buf, 0, buf.length)) >= 0) {
                    if (len != 0) {
                        totalLen += len;
                        os.write(buf, 0, len);
                    }
                }
                is.close();
                is = null;

                os.flush();
                os.close();
                os = null;

                success = tmp.renameTo(destFile);
            } catch (Exception e) {
                // most likely a socket timeout
                e.printStackTrace();
                log.e(LOGTAG, "downloading " + downloadUrl.toString() + " failed after " + totalLen + " bytes: "
                        + e.toString());
                try {
                    // signal to the framework that this socket is hosed.
                    // with the various nested streams, this may not work...
                    is.reset();
                } catch (Exception ex) {
                    // ignore
                }
                throw e;
            } finally {
                if (os != null) {
                    try {
                        os.close();
                    } catch (Exception e) {
                        // no-op
                    }
                }
                if (is != null) {
                    try {
                        // ensure stream is consumed...
                        byte buf[] = new byte[8192];
                        while (is.read(buf) >= 0)
                            ;
                    } catch (Exception e) {
                        // no-op
                    }
                    try {
                        is.close();
                    } catch (Exception e) {
                        // no-op
                    }
                }
                if (tmp.exists()) {
                    tmp.delete();
                }
                response.consumeContent();
            }
        } catch (ClientWebException e) {
            log.printStackTrace(e);
            if (response != null) {
                response.consumeContent();
            }
            if (attemptCount != 1) {
                throw e;
            }
        }
    }
    return HttpStatus.SC_OK;
}

From source file:au.org.theark.study.util.DataUploader.java

public StringBuffer uploadAndReportSubjectConsentDataFile(InputStream inputStream, long size, String fileFormat,
        char delimChar) throws FileFormatException, ArkSystemException {
    uploadReport = new StringBuffer();
    long rowCount = 0;
    long insertFieldsCount = 0;
    long updateFieldsCount = 0;
    List<Consent> consentFieldsToUpdate = new ArrayList<Consent>();
    List<Consent> consentFieldsToInsert = new ArrayList<Consent>();
    delimiterCharacter = delimChar;

    InputStreamReader inputStreamReader = null;
    CsvReader csvReader = null;
    DecimalFormat decimalFormat = new DecimalFormat("0.00");

    if (fileFormat.equalsIgnoreCase("XLS")) {
        Workbook w;
        try {
            w = Workbook.getWorkbook(inputStream);
            delimiterCharacter = ',';
            XLStoCSV xlsToCsv = new XLStoCSV(delimiterCharacter);
            inputStream = xlsToCsv.convertXlsToCsv(w);
            inputStream.reset();
        } catch (BiffException e) {
            log.error(e.getMessage());
        } catch (IOException e) {
            log.error(e.getMessage());
        }
    }

    try {
        inputStreamReader = new InputStreamReader(inputStream);
        csvReader = new CsvReader(inputStreamReader, delimiterCharacter);
        csvReader.readHeaders();
        String[] stringLineArray;

        List<StudyComp> studyComList = iArkCommonService.getStudyComponentByStudy(study);
        Map<String, StudyComp> studyCompMap = new HashMap<String, StudyComp>();
        for (StudyComp studyComp : studyComList) {
            studyCompMap.put(studyComp.getName().toUpperCase(), studyComp);
        }

        List<StudyCompStatus> studyCompStatusList = iArkCommonService.getStudyComponentStatus();
        Map<String, StudyCompStatus> studyCompStatusMap = new HashMap<String, StudyCompStatus>();
        for (StudyCompStatus studyCompStatus : studyCompStatusList) {
            studyCompStatusMap.put(studyCompStatus.getName().toUpperCase(), studyCompStatus);
        }

        List<ConsentType> consentTypeList = iArkCommonService.getConsentType();
        Map<String, ConsentType> consentTypeMap = new HashMap<String, ConsentType>();
        for (ConsentType consentType : consentTypeList) {
            consentTypeMap.put(consentType.getName().toUpperCase(), consentType);
        }

        List<ConsentStatus> consentStatusList = iArkCommonService.getConsentStatus();
        Map<String, ConsentStatus> consentStatusMap = new HashMap<String, ConsentStatus>();
        for (ConsentStatus consentStatus : consentStatusList) {
            consentStatusMap.put(consentStatus.getName().toUpperCase(), consentStatus);
        }

        List<YesNo> consentDownloadedList = iArkCommonService.getYesNoList();
        Map<String, YesNo> consentDownloadedMap = new HashMap<String, YesNo>();
        for (YesNo consentDownloaded : consentDownloadedList) {
            consentDownloadedMap.put(consentDownloaded.getName().toUpperCase(), consentDownloaded);
        }

        ConsentVO consentVO = new ConsentVO();
        consentVO.getConsent().setStudy(study);
        int subjectUidIndex = csvReader.getIndex("SUBJECTUID");
        int studyComponentIndex = csvReader.getIndex("STUDY_COMPONENT");
        int studyComponentStatusIndex = csvReader.getIndex("STUDY_COMPONENT_STATUS");
        int consentTypeIndex = csvReader.getIndex("CONSENT_TYPE");
        int consentStatusIndex = csvReader.getIndex("CONSENT_STATUS");
        int consentDownloadedIndex = csvReader.getIndex("CONSENT_DOWNLOADED");
        int consentedByIndex = csvReader.getIndex("CONSENTED_BY");
        int consentDateIndex = csvReader.getIndex("CONSENT_DATE");
        int commentIndex = csvReader.getIndex("COMMENT");
        int completedDateIndex = csvReader.getIndex("COMPLETED_DATE");

        while (csvReader.readRecord()) {
            ++rowCount;
            stringLineArray = csvReader.getValues();
            String subjectUID = stringLineArray[subjectUidIndex];
            LinkSubjectStudy subject = iArkCommonService.getSubjectByUID(subjectUID, study);

            consentVO.getConsent().setLinkSubjectStudy(subject);
            consentVO.getConsent()
                    .setStudyComp(studyCompMap.get(stringLineArray[studyComponentIndex].toUpperCase()));

            List<Consent> existingConsentList = iStudyService.searchConsent(consentVO);

            if (existingConsentList.size() > 0) {
                ++updateFieldsCount;
                Consent existingConsent = existingConsentList.get(0);
                existingConsent.setStudyComponentStatus(
                        studyCompStatusMap.get(stringLineArray[studyComponentStatusIndex].toUpperCase()));
                existingConsent
                        .setConsentType(consentTypeMap.get(stringLineArray[consentTypeIndex].toUpperCase()));
                existingConsent.setConsentStatus(
                        consentStatusMap.get(stringLineArray[consentStatusIndex].toUpperCase()));
                existingConsent.setConsentDownloaded(
                        consentDownloadedMap.get(stringLineArray[consentDownloadedIndex].toUpperCase()));

                if (stringLineArray.length > consentedByIndex) {
                    existingConsent.setConsentedBy(stringLineArray[consentedByIndex]);
                }

                if (stringLineArray.length > consentDateIndex) {
                    String consentDate = stringLineArray[consentDateIndex];
                    if (consentDate != null && consentDate.trim().length() > 0) {
                        existingConsent.setConsentDate(simpleDateFormat.parse(consentDate));
                    }
                }

                if (stringLineArray.length > commentIndex) {
                    existingConsent.setComments(stringLineArray[commentIndex]);
                }

                if ("Completed".equalsIgnoreCase(existingConsent.getStudyComponentStatus().getName())) {
                    try {
                        existingConsent
                                .setCompletedDate(simpleDateFormat.parse(stringLineArray[completedDateIndex]));
                    } catch (Exception e) {
                        existingConsent.setCompletedDate(null);
                    }
                } else {
                    existingConsent.setCompletedDate(null);
                }
                consentFieldsToUpdate.add(existingConsent);
            } else {
                ++insertFieldsCount;
                Consent consent = new Consent();
                consent.setStudy(study);
                consent.setLinkSubjectStudy(subject);
                consent.setStudyComp(
                        studyCompMap.get(stringLineArray[studyComponentIndex].toUpperCase().trim()));
                consent.setStudyComponentStatus(studyCompStatusMap
                        .get(stringLineArray[studyComponentStatusIndex].toUpperCase().trim()));
                consent.setConsentType(
                        consentTypeMap.get(stringLineArray[consentTypeIndex].toUpperCase().trim()));
                consent.setConsentStatus(
                        consentStatusMap.get(stringLineArray[consentStatusIndex].toUpperCase().trim()));
                consent.setConsentDownloaded(
                        consentDownloadedMap.get(stringLineArray[consentDownloadedIndex].toUpperCase().trim()));

                if (stringLineArray.length > consentedByIndex) {
                    consent.setConsentedBy(stringLineArray[consentedByIndex]);
                }

                if (stringLineArray.length > consentDateIndex) {
                    String consentDate = stringLineArray[consentDateIndex].trim();
                    if (consentDate != null && consentDate.trim().length() > 0) {
                        try {
                            consent.setConsentDate(simpleDateFormat.parse(consentDate));
                        } catch (Exception e) {
                            // parse(null) would throw a NullPointerException; leave the date unset instead
                            consent.setConsentDate(null);
                        }
                    }
                }

                if (stringLineArray.length > commentIndex) {
                    consent.setComments(stringLineArray[commentIndex].trim());
                }

                if ("Completed".equalsIgnoreCase(consent.getStudyComponentStatus().getName())) {
                    try {
                        consent.setCompletedDate(
                                simpleDateFormat.parse(stringLineArray[completedDateIndex].trim()));
                    } catch (Exception e) {
                        consent.setCompletedDate(null);
                    }
                }
                consentFieldsToInsert.add(consent);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new ArkSystemException(e.getMessage());
    } finally {
        uploadReport.append("Total file size: ");
        uploadReport.append(decimalFormat.format(size / 1024.0 / 1024.0));
        uploadReport.append(" MB");
        uploadReport.append("\n");

        if (csvReader != null) {
            try {
                csvReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: csvRdr.close()", ex);
            }
        }
        if (inputStreamReader != null) {
            try {
                inputStreamReader.close();
            } catch (Exception ex) {
                log.error("Cleanup operation failed: isr.close()", ex);
            }
        }

    }

    uploadReport.append("Process ");
    uploadReport.append(rowCount);
    uploadReport.append(" rows of data");
    uploadReport.append("\n");

    uploadReport.append(insertFieldsCount);
    uploadReport.append(" fields were inserted.");
    uploadReport.append("\n");
    uploadReport.append(updateFieldsCount);
    uploadReport.append(" fields were updated.");
    uploadReport.append("\n");

    try {
        iStudyService.processSubjectConsentBatch(consentFieldsToUpdate, consentFieldsToInsert);
    } catch (Exception e) {
        e.printStackTrace();
        throw new ArkSystemException(e.getMessage());
    }

    return uploadReport;
}