Example usage for java.nio.file Files readAllBytes

Introduction

This page collects example usages of the java.nio.file Files.readAllBytes method.

Prototype

public static byte[] readAllBytes(Path path) throws IOException 

Document

Reads all the bytes from a file.
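
A minimal sketch of the call, assuming a small readable file at a placeholder path:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class ReadAllBytesDemo {
    public static void main(String[] args) throws IOException {
        // "data.bin" is a placeholder; readAllBytes loads the entire file
        // into memory, so it is intended for files of modest size.
        Path path = Paths.get("data.bin");
        byte[] bytes = Files.readAllBytes(path);
        System.out.println("Read " + bytes.length + " bytes");
    }
}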

Usage

From source file:com.qmetry.qaf.automation.integration.qmetry.qmetry6.QMetryRestWebservice.java

/**
 * Attaches a test log using the run id.
 * 
 * @param token
 *            - token generated using username and password
 * @param scope
 *            : project:release:cycle
 * @param testCaseRunId
 * @param filePath
 *            - absolute path of the file to be attached
 * @return the attachment id, or 0 on failure
 */
public int attachTestLogsUsingRunId(long testCaseRunId, File filePath) {
    try {
        SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd-HHmmss");

        final String CurrentDate = format.format(new Date());
        Path path = Paths.get(filePath.toURI());
        byte[] outFileArray = Files.readAllBytes(path);

        // readAllBytes throws IOException on failure rather than returning
        // null, so this check is purely defensive.
        if (outFileArray != null) {
            CloseableHttpClient httpclient = HttpClients.createDefault();
            try {
                HttpPost httppost = new HttpPost(serviceUrl + "/rest/attachments/testLog");

                MultipartEntityBuilder builder = MultipartEntityBuilder.create();
                builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);

                FileBody bin = new FileBody(filePath);
                builder.addTextBody("desc", "Attached on " + CurrentDate,
                        org.apache.http.entity.ContentType.TEXT_PLAIN);
                builder.addTextBody("type", "TCR", org.apache.http.entity.ContentType.TEXT_PLAIN);
                builder.addTextBody("entityId", String.valueOf(testCaseRunId),
                        org.apache.http.entity.ContentType.TEXT_PLAIN);
                builder.addPart("file", bin);

                HttpEntity reqEntity = builder.build();
                httppost.setEntity(reqEntity);
                httppost.addHeader("usertoken", token);
                httppost.addHeader("scope", scope);

                CloseableHttpResponse response = httpclient.execute(httppost);
                String str = null;
                try {
                    str = EntityUtils.toString(response.getEntity());
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    response.close(); // release the connection
                }
                JsonElement root = new Gson().fromJson(str, JsonElement.class);
                JsonElement data = root.getAsJsonObject().get("data");
                int id = Integer.parseInt(data.getAsJsonArray().get(0).getAsJsonObject().get("id").toString());
                return id;
            } finally {
                httpclient.close();
            }
        } else {
            System.out.println(filePath + " file does not exist");
        }
    } catch (Exception ex) {
        System.out.println("Error in attaching file - " + filePath);
        System.out.println(ex.getMessage());
    }
    return 0;
}
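
Since CloseableHttpClient and CloseableHttpResponse both implement Closeable, the request/response handling above can also be written with try-with-resources so the connection is released even on failure. A minimal sketch, using the same Apache HttpClient types as the example (httppost stands in for the request built above):

try (CloseableHttpClient httpclient = HttpClients.createDefault();
        CloseableHttpResponse response = httpclient.execute(httppost)) {
    // toString consumes the entity; both resources are closed automatically
    String str = EntityUtils.toString(response.getEntity());
    // ... parse str as JSON, as in the method above
}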

From source file:eu.sonata.nfv.nec.validate.cli.Main.java

/**
 * Reads a file from the given path with a given charset.
 *
 * @param path The path to the file.
 * @param encoding The Charset of the file.
 * @return A string that contains the file content.
 */
private static String readFile(String path, Charset encoding) throws IOException {
    return new String(Files.readAllBytes(Paths.get(path)), encoding);
}
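
Since Java 11, the same helper can be written with Files.readString, which reads and decodes the file in one call:

private static String readFile(String path, Charset encoding) throws IOException {
    return Files.readString(Paths.get(path), encoding);
}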

From source file:edu.harvard.iq.dataverse.api.imports.ImportServiceBean.java

@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient,
        String harvestIdentifier, String metadataFormat, File metadataFile, PrintWriter cleanupLog)
        throws ImportException, IOException {
    if (harvestingClient == null || harvestingClient.getDataverse() == null) {
        throw new ImportException(
                "importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient.");
    }
    Dataverse owner = harvestingClient.getDataverse();
    Dataset importedDataset = null;

    DatasetDTO dsDTO = null;
    String json = null;

    // TODO: 
    // At the moment (4.5; the first official "export/harvest release"), there
    // are 3 supported metadata formats: DDI, DC and native Dataverse metadata 
    // encoded in JSON. The 2 XML formats are handled by custom implementations;
    // each of the 2 implementations uses its own parsing approach. (see the 
    // ImportDDIServiceBean and ImportGenericServiceBean for details). 
    // TODO: Need to create a system of standardized import plugins - similar to Stephen
    // Kraffmiller's export modules; replace the logic below with clean
    // programmatic lookup of the import plugin needed. 

    if ("ddi".equalsIgnoreCase(metadataFormat) || "oai_ddi".equals(metadataFormat)
            || metadataFormat.toLowerCase().matches("^oai_ddi.*")) {
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            // TODO: 
            // import type should be configurable - it should be possible to 
            // select whether you want to harvest with or without files, 
            // ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
            logger.fine("importing DDI " + metadataFile.getAbsolutePath());
            dsDTO = importDDIService.doImport(ImportType.HARVEST_WITH_FILES, xmlToParse);
        } catch (IOException | XMLStreamException | ImportException e) {
            throw new ImportException(
                    "Failed to process DDI XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
        logger.fine("importing DC " + metadataFile.getAbsolutePath());
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            dsDTO = importGenericService.processOAIDCxml(xmlToParse);
        } catch (IOException | XMLStreamException e) {
            throw new ImportException(
                    "Failed to process Dublin Core XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dataverse_json".equals(metadataFormat)) {
        // This is Dataverse metadata already formatted in JSON. 
        // Simply read it into a string, and pass to the final import further down:
        logger.fine(
                "Attempting to import custom dataverse metadata from file " + metadataFile.getAbsolutePath());
        json = new String(Files.readAllBytes(metadataFile.toPath()));
    } else {
        throw new ImportException("Unsupported import metadata format: " + metadataFormat);
    }

    if (json == null) {
        if (dsDTO != null) {
            // convert DTO to Json, 
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            json = gson.toJson(dsDTO);
            logger.fine("JSON produced for the metadata harvested: " + json);
        } else {
            throw new ImportException(
                    "Failed to transform XML metadata format " + metadataFormat + " into a DatasetDTO");
        }
    }

    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    //and call parse Json to read it into a dataset   
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(true);
        Dataset ds = parser.parseDataset(obj);

        // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol
        // we support, it should be rejected.
        // (TODO: ! - add some way of keeping track of supported protocols!)
        //if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
        //    throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
        //}
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());

        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            // For migration and harvest, add NA for missing required values
            for (ConstraintViolation<DatasetField> v : violations) {
                DatasetField f = v.getRootBean();
                f.setSingleValue(DatasetField.NA_VALUE);
            }
        }

        // Check data against validation constraints
        // If we are migrating and "scrub migration data" is true we attempt to fix invalid data
        // if the fix fails stop processing of this file by throwing exception
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                // TODO: Is this scrubbing something we want to continue doing? 
                if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    String msg = "Data modified - File: " + metadataFile.getName() + "; Field: "
                            + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; "
                            + "Invalid value:  '" + f.getValue() + "'" + " Converted Value:'"
                            + DatasetField.NA_VALUE + "'";
                    cleanupLog.println(msg);
                    f.setValue(DatasetField.NA_VALUE);

                }
            }
        }

        // A Global ID is required, in order for us to be able to harvest and import
        // this dataset:
        if (StringUtils.isEmpty(ds.getGlobalId())) {
            throw new ImportException("The harvested metadata record with the OAI server identifier "
                    + harvestIdentifier
                    + " does not contain a global unique identifier that we could recognize, skipping.");
        }

        ds.setHarvestedFrom(harvestingClient);
        ds.setHarvestIdentifier(harvestIdentifier);

        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());

        if (existingDs != null) {
            // If this dataset already exists IN ANOTHER DATAVERSE
            // we are just going to skip it!
            if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
                throw new ImportException("The dataset with the global id " + ds.getGlobalId()
                        + " already exists, in the dataverse " + existingDs.getOwner().getAlias()
                        + ", skipping.");
            }
            // And if we already have a dataset with this same id, in this same
            // dataverse, but it is  LOCAL dataset (can happen!), we're going to 
            // skip it also: 
            if (!existingDs.isHarvested()) {
                throw new ImportException("A LOCAL dataset with the global id " + ds.getGlobalId()
                        + " already exists in this dataverse; skipping.");
            }
            // For harvested datasets, there should always only be one version.
            // We will replace the current version with the imported version.
            if (existingDs.getVersions().size() != 1) {
                throw new ImportException("Error importing Harvested Dataset, existing dataset has "
                        + existingDs.getVersions().size() + " versions");
            }
            // Purge all the SOLR documents associated with this client from the 
            // index server: 
            indexService.deleteHarvestedDocuments(existingDs);
            // files from harvested datasets are removed unceremoniously, 
            // directly in the database. no need to bother calling the 
            // DeleteFileCommand on them.
            for (DataFile harvestedFile : existingDs.getFiles()) {
                DataFile merged = em.merge(harvestedFile);
                em.remove(merged);
                harvestedFile = null;
            }
            // TODO: 
            // Verify what happens with the indexed files in SOLR? 
            // are they going to be overwritten by the reindexing of the dataset?
            existingDs.setFiles(null);
            Dataset merged = em.merge(existingDs);
            engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest));
            importedDataset = engineSvc
                    .submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));

        } else {
            importedDataset = engineSvc
                    .submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        }

    } catch (JsonParseException | ImportException | CommandException ex) {
        logger.fine("Failed to import harvested dataset: " + ex.getClass() + ": " + ex.getMessage());
        FileOutputStream savedJsonFileStream = new FileOutputStream(
                new File(metadataFile.getAbsolutePath() + ".json"));
        byte[] jsonBytes = json.getBytes();
        int i = 0;
        while (i < jsonBytes.length) {
            int chunkSize = i + 8192 <= jsonBytes.length ? 8192 : jsonBytes.length - i;
            savedJsonFileStream.write(jsonBytes, i, chunkSize);
            i += chunkSize;
            savedJsonFileStream.flush();
        }
        savedJsonFileStream.close();
        logger.info("JSON produced saved in " + metadataFile.getAbsolutePath() + ".json");
        throw new ImportException(
                "Failed to import harvested dataset: " + ex.getClass() + " (" + ex.getMessage() + ")", ex);
    }
    return importedDataset;
}
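
The manual chunked write in the catch block above can be replaced by a single call to java.nio.file.Files.write, which writes the whole array and closes the stream itself. A minimal sketch (savedJsonPath stands in for metadataFile.getAbsolutePath() + ".json"):

Files.write(Paths.get(savedJsonPath), json.getBytes());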

From source file:org.trustedanalytics.h2oscoringengine.publisher.PublisherIntegrationTest.java

private byte[] prepareModelJavaFile() throws IOException {
    TestCompilationResourcesBuilder compilationResourcesBuilder = new TestCompilationResourcesBuilder();
    return Files.readAllBytes(compilationResourcesBuilder.prepareModelJavaFile(testModelName));
}

From source file:com.sesnu.orion.web.service.ReportService.java

public String generatePayAuthReport(Approval app, String state) throws DocumentException, IOException {
    Payment pay = payDao.get(app.getForId());
    OrderView order = orderDao.get(pay.getOrderRef());
    Item item = itemDao.get(order.getItemId());
    List<DuLicenseView> licenses = licenseDao.listByOrderId(order.getId());
    String git = "NA";
    if (licenses.size() > 0) {
        git = licenses.get(0).getGit();
    }

    String originalHtml = conf.getFile("payAuth.html");

    String editedHtml = originalHtml.replace("ORDER_REF", order.getInvNo());
    editedHtml = setPaths(editedHtml, state);
    editedHtml = editedHtml.replace("PRODUCT_NAME", item.getName());
    String bl = order.getBl() == null ? "NA" : order.getBl();
    editedHtml = editedHtml.replace("BILL_OF_LOADING", bl);
    editedHtml = editedHtml.replace("BRAND_NAME", item.getBrand());
    editedHtml = editedHtml.replace("G_I_T", git);
    editedHtml = editedHtml.replace("PACKAGING",
            order.getBaseSize().toString() + order.getBaseUnit() + "X" + order.getQtyPerPack() + "pcs");
    editedHtml = editedHtml.replace("QTY_PER_CONT", order.getPckPerCont().toString());
    editedHtml = editedHtml.replace("DESTINATION", order.getDestinationPort());
    editedHtml = editedHtml.replace("QUANTITY", order.getContQnt() + "X" + order.getContSize() + "'");
    editedHtml = editedHtml.replace("PAYMENT_DATE", pay.getUpdatedOn());

    // create request body
    StringBuilder sb = new StringBuilder();
    sb.append("<tr>");
    sb.append("<td>" + pay.getName() + "</td>");
    sb.append("<td>" + pay.getPaymentMethod() + "</td>");
    sb.append("<td>" + pay.getCurr() + "</td>");
    sb.append("<td>" + pay.getDeposit() + "</td>");
    sb.append("<td>" + pay.getEstimate() + "</td>");
    sb.append("<td>" + pay.getPaymentAmount() + "</td>");
    String rmrk = pay.getRemark() == null ? "" : pay.getRemark();
    sb.append("<td>" + rmrk + "</td>");
    sb.append("</tr>");
    editedHtml = editedHtml.replace("BID_DATA_TABLE", sb.toString());

    String emailTo = app.getRequestedBy() + " [" + (userDao.getUserName(app.getRequestedBy())).getEmail() + "]";
    String emailCC = app.getApprover() + " [" + (userDao.getUserName(app.getApprover())).getEmail() + "]";
    editedHtml = editedHtml.replace("EMAIL_TO", emailTo);
    editedHtml = editedHtml.replace("EMAIL_CC", emailCC);

    if (!state.equals("preview")) {
        editedHtml = editedHtml.replace("SIGNATURE", app.getApprover());
        editedHtml = editedHtml.replace("APPROVED_DATE", new Date().toGMTString());
        String pdfFilePath = util.convertToPdf(editedHtml); // convert to pdf
        Path path = Paths.get(pdfFilePath);
        byte[] data = Files.readAllBytes(path); // convert to byte array
        String[] frag = pdfFilePath.split("/");
        String fileName = frag[frag.length - 1]; // get file name
        util.writeToS3(data, fileName); // write to s3
        sendApprovalEmail(app, pdfFilePath, order);
        Files.deleteIfExists(path);

        Document doc = new Document(order.getId(), fileName, "Payment[" + pay.getName() + "]",
                Util.parseDate(new Date()), "Approval");
        docDao.saveOrUpdate(doc);
    } else {
        editedHtml = editedHtml.replace("APPROVED_DATE", "");
    }

    return editedHtml;
}

From source file:com.khepry.utilities.GenericUtilities.java

public static String transformLogViaXSLT(String logFilePath, String xslFilePath)
        throws TransformerConfigurationException, TransformerException, IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    File logFile = new File(logFilePath);
    if (logFile.exists()) {
        File xslFile = new File(xslFilePath);
        if (xslFile.exists()) {
            TransformerFactory factory = TransformerFactory.newInstance();
            Source xslt = new StreamSource(new File(xslFilePath));
            Transformer transformer = factory.newTransformer(xslt);
            Source logXmlText = new StreamSource(new File(logFilePath));
            transformer.transform(logXmlText, new StreamResult(baos));
            return baos.toString();
        } else {
            return new String(Files.readAllBytes(Paths.get(logFilePath)));
        }
    } else {
        return baos.toString();
    }
}
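
Note that both baos.toString() and new String(byte[]) decode with the platform default charset. To make the decoding explicit, a charset from java.nio.charset.StandardCharsets can be passed, for example:

String xml = new String(Files.readAllBytes(Paths.get(logFilePath)), StandardCharsets.UTF_8);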

From source file:io.hops.hopsworks.api.zeppelin.util.ZeppelinResource.java

private String readConfigFile(File path) throws IOException {
    // read contents from file
    if (!path.exists()) {
        throw new IOException("File not found: " + path);
    }
    return new String(Files.readAllBytes(path.toPath()));
}

From source file:org.trustedanalytics.h2oscoringengine.publisher.PublisherIntegrationTest.java

private byte[] prepareGenModelLib() throws IOException {
    TestCompilationResourcesBuilder compilationResourcesBuilder = new TestCompilationResourcesBuilder();
    return Files.readAllBytes(compilationResourcesBuilder.prepareLibraryFile());
}

From source file:main.java.refinement_class.Useful.java

public static String readFile_local_tmp(String path, Charset encoding) throws IOException {
    byte[] encoded = Files.readAllBytes(Paths.get(path));
    return new String(encoded, encoding);
}

From source file:by.belisa.util.OnlineConvert.java

/**
 *
 * Makes an API call to convert a file/URL/hash based on the given parameters
 * and returns the XML response.
 *
 * @param targetTypeMethod The target conversion method (like convert-to-jpg,
 * convert-to-mp3)
 * @param sourceType The source type (URL, FILE_PATH or FILE_BASE64)
 * @param source The source, interpreted according to sourceType; if
 * sourceType = URL, pass the URL string in this param.
 * @param sourceName The file name. This param is used only with
 * sourceType = FILE_PATH or FILE_BASE64
 * @param formatOptions Extra parameters required for the file conversion,
 * passed as a map.
 * @param notificationUrl Notification URL for API actions.
 * @return XML response string from the server.
 * @throws java.lang.Exception when required values are not found in the
 * instance of the class or in the arguments
 */
public String convert(String targetTypeMethod, String sourceType, String source, String sourceName,
        Map<String, String> formatOptions, String notificationUrl) throws Exception {
    if (null == this.targetTypeOptions.get(targetTypeMethod)) {
        throw new Exception("Invalid Target Type.");
    }

    this.targetType = this.targetTypeOptions.get(targetTypeMethod);

    if (null == this.sourceTypeOptions.get(sourceType)) {
        throw new Exception("Invalid Source Type.");
    }

    this.sourceType = sourceType;

    if (SOURCE_TYPE_FILE_BASE64.equals(this.sourceType) || SOURCE_TYPE_FILE_PATH.equals(this.sourceType)) {
        if (source == null || source.length() < 1) {
            throw new Exception("Invalid Source Name.");
        }
    }

    if (this.sourceType.equals(SOURCE_TYPE_FILE_PATH)) {
        if (!new File(source).exists()) {
            throw new Exception("File not found: " + source);
        }
        Path path = Paths.get(source);
        source = new String(Base64.encode(Files.readAllBytes(path)));
        //source = Base64.encodeBase64String(Files.readAllBytes(path));
    }

    Map<String, String> data = new HashMap<String, String>();
    data.put("apiKey", this.apiKey);
    data.put("targetType", this.targetTypeOptions.get(targetTypeMethod));
    data.put("targetMethod", targetTypeMethod);
    data.put("testMode", (String.valueOf(this.testMode)));
    data.put("notificationUrl", notificationUrl);

    String formatOptionsXml = "";
    if (null != formatOptions) {
        formatOptionsXml = this.getFormatMap2XML(formatOptions);
    }

    String apiCallResponse;
    List response;

    if (this.sourceType.equals(OnlineConvert.SOURCE_TYPE_URL)) {
        data.put("sourceUrl", source);
        apiCallResponse = this.apiCall("queue-insert", data, formatOptionsXml);
    } else {
        List<Map> query = new ArrayList<Map>();
        Map<String, String> file = new HashMap<String, String>();
        file.put("fileName", sourceName);
        file.put("fileData", source);
        query.add(OnlineConvert.QUEUE_COMMAN_PARAMS, data);
        query.add(OnlineConvert.QUEUE_FILE_METADATA_PARAMS, file);
        apiCallResponse = this.apiCall("queue-insert", query, formatOptionsXml);
    }

    response = this.getXML2Map(apiCallResponse);
    if (!response.isEmpty()) {
        Map responseStatus = (HashMap) response.get(OnlineConvert.QUEUE_ANSWER_STATUS);
        Map responseParams = (HashMap) response.get(OnlineConvert.QUEUE_ANSWER_PARAMS);
        if (Integer.parseInt(responseStatus.get("code").toString()) == 0) {
            this.hash = (String) responseParams.get("hash");
            this.downloadUrl = (String) responseParams.get("downloadUrl");
        }
    }

    this.url = OnlineConvert.URL;
    return apiCallResponse;
}
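
The Base64.encode call above depends on which Base64 class is imported. Since Java 8, the standard library offers java.util.Base64, so the encoding step could also be written as a sketch like:

String encoded = Base64.getEncoder().encodeToString(Files.readAllBytes(Paths.get(source)));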