Example usage for java.nio.channels Channels newChannel

Introduction

On this page you can find example usage for java.nio.channels.Channels.newChannel.

Prototype

public static WritableByteChannel newChannel(OutputStream out) 

Document

Constructs a channel that writes bytes to the given stream.
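
A minimal, self-contained sketch of the method in isolation (the class and variable names here are ours, not from any of the quoted projects). Closing the returned channel also closes the underlying stream.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.WritableByteChannel;
import java.nio.charset.StandardCharsets;

public class NewChannelDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();

        // Wrap the OutputStream in a WritableByteChannel and write through it.
        try (WritableByteChannel channel = Channels.newChannel(baos)) {
            channel.write(ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8)));
        }

        System.out.println(baos.toString(StandardCharsets.UTF_8.name())); // prints: hello
    }
}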

Usage

From source file:tvhchgen.Service.java

/**
 * Save the content of the Url to the given path
 * @param urlStr the URL to fetch
 * @param outPath the destination file path
 * @return true if the content was saved to a new file, false otherwise
 */
public boolean saveUrl(String urlStr, String outPath) {
    InputStream is = null;
    try {
        //System.out.println( "Getting: " + urlStr );
        URL url = new URL(urlStr);

        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        HttpURLConnection.setFollowRedirects(true);
        // allow both GZip and Deflate (ZLib) encodings
        conn.setRequestProperty("Accept-Encoding", "gzip, deflate");
        conn.setRequestProperty("User-Agent", DEFAULT_USER_AGENT);
        conn.setRequestProperty("Referer", DEFAULT_REFERER);
        String encoding = conn.getContentEncoding();

        // create the appropriate stream wrapper based on the encoding type,
        // assigning it to "is" so the finally block below can close it
        if (encoding != null && encoding.equalsIgnoreCase("gzip")) {
            is = new GZIPInputStream(conn.getInputStream());
        } else if (encoding != null && encoding.equalsIgnoreCase("deflate")) {
            is = new InflaterInputStream(conn.getInputStream(), new Inflater(true));
        } else {
            is = conn.getInputStream();
        }

        //System.out.println( filePath );
        File file = new File(outPath);
        if (!file.exists()) {
            file.createNewFile();

            FileOutputStream fos = new FileOutputStream(file);
            ReadableByteChannel rbc = Channels.newChannel(is);
            fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
            fos.close();
            return true;
        }
    } catch (Exception e) {
        System.out.println("Exception: " + e.toString());
    } finally {
        if (is != null) {
            try {
                is.close();
            } catch (Exception e) {
                System.out.println("Exception: " + e.toString());
            }
        }
    }
    return false;
}
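
Note that a single FileChannel.transferFrom call is not guaranteed to consume the whole source when that source is a stream-backed channel, so the Long.MAX_VALUE call above can, in principle, return before end-of-stream. Since Java 7 the same copy can be written without an explicit channel; a minimal sketch (ours, assuming overwriting the destination is acceptable):

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

// Hypothetical alternative to saveUrl: Files.copy loops internally
// until end-of-stream, so no explicit byte count is needed.
public static void saveUrlSimple(String urlStr, String outPath) throws IOException {
    try (InputStream in = new URL(urlStr).openStream()) {
        Files.copy(in, Paths.get(outPath), StandardCopyOption.REPLACE_EXISTING);
    }
}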

From source file:fr.paris.lutece.plugins.directory.web.action.ExportDirectoryAction.java

/**
 * {@inheritDoc}
 */
@Override
public IPluginActionResult process(HttpServletRequest request, HttpServletResponse response,
        AdminUser adminUser, DirectoryAdminSearchFields searchFields) throws AccessDeniedException {
    DefaultPluginActionResult result = new DefaultPluginActionResult();

    String strIdDirectory = request.getParameter(PARAMETER_ID_DIRECTORY);
    int nIdDirectory = DirectoryUtils.convertStringToInt(strIdDirectory);
    Directory directory = DirectoryHome.findByPrimaryKey(nIdDirectory, getPlugin());
    String strIdDirectoryXsl = request.getParameter(PARAMETER_ID_DIRECTORY_XSL);
    int nIdDirectoryXsl = DirectoryUtils.convertStringToInt(strIdDirectoryXsl);
    WorkflowService workflowService = WorkflowService.getInstance();
    boolean bWorkflowServiceEnable = workflowService.isAvailable();
    String strShotExportFinalOutPut = null;
    DirectoryXsl directoryXsl = DirectoryXslHome.findByPrimaryKey(nIdDirectoryXsl, getPlugin());

    // -----------------------------------------------------------------------
    if ((directory == null) || (directoryXsl == null) || !RBACService.isAuthorized(Directory.RESOURCE_TYPE,
            strIdDirectory, DirectoryResourceIdService.PERMISSION_MANAGE_RECORD, adminUser)) {
        throw new AccessDeniedException(
                I18nService.getLocalizedString(MESSAGE_ACCESS_DENIED, request.getLocale()));
    }

    String strFileExtension = directoryXsl.getExtension();
    String strFileName = directory.getTitle() + "." + strFileExtension;
    strFileName = UploadUtil.cleanFileName(strFileName);

    boolean bIsCsvExport = strFileExtension.equals(EXPORT_CSV_EXT);
    boolean bDisplayDateCreation = directory.isDateShownInExport();
    boolean bDisplayDateModification = directory.isDateModificationShownInExport();

    List<Integer> listResultRecordId = new ArrayList<Integer>();

    if (request.getParameter(PARAMETER_BUTTON_EXPORT_SEARCH) != null) {
        String[] selectedRecords = request.getParameterValues(PARAMETER_SELECTED_RECORD);
        List<String> listSelectedRecords;

        if (selectedRecords != null) {
            listSelectedRecords = Arrays.asList(selectedRecords);

            if ((listSelectedRecords != null) && (listSelectedRecords.size() > 0)) {
                for (String strRecordId : listSelectedRecords) {
                    listResultRecordId.add(Integer.parseInt(strRecordId));
                }
            }
        } else {
            // sort order and sort entry are not needed in export
            listResultRecordId = DirectoryUtils.getListResults(request, directory, bWorkflowServiceEnable, true,
                    null, RecordFieldFilter.ORDER_NONE, searchFields, adminUser, adminUser.getLocale());
        }
    } else {
        // sort order and sort entry are not needed in export
        listResultRecordId = DirectoryUtils.getListResults(request, directory, bWorkflowServiceEnable, false,
                null, RecordFieldFilter.ORDER_NONE, searchFields, adminUser, adminUser.getLocale());
    }

    EntryFilter entryFilter = new EntryFilter();
    entryFilter.setIdDirectory(directory.getIdDirectory());
    entryFilter.setIsGroup(EntryFilter.FILTER_FALSE);
    entryFilter.setIsComment(EntryFilter.FILTER_FALSE);
    entryFilter.setIsShownInExport(EntryFilter.FILTER_TRUE);

    List<IEntry> listEntryResultSearch = EntryHome.getEntryList(entryFilter, getPlugin());

    Map<Integer, Field> hashFields = DirectoryUtils.getMapFieldsOfListEntry(listEntryResultSearch, getPlugin());

    StringBuffer strBufferListRecordXml = null;

    java.io.File tmpFile = null;
    BufferedWriter bufferedWriter = null;
    OutputStreamWriter outputStreamWriter = null;

    File fileTemplate = null;
    String strFileOutPut = DirectoryUtils.EMPTY_STRING;

    if (directoryXsl.getFile() != null) {
        fileTemplate = FileHome.findByPrimaryKey(directoryXsl.getFile().getIdFile(), getPlugin());
    }

    XmlTransformerService xmlTransformerService = null;
    PhysicalFile physicalFile = null;
    String strXslId = null;

    if ((fileTemplate != null) && (fileTemplate.getPhysicalFile() != null)) {
        fileTemplate.setPhysicalFile(PhysicalFileHome
                .findByPrimaryKey(fileTemplate.getPhysicalFile().getIdPhysicalFile(), getPlugin()));

        xmlTransformerService = new XmlTransformerService();
        physicalFile = fileTemplate.getPhysicalFile();
        strXslId = XSL_UNIQUE_PREFIX_ID + physicalFile.getIdPhysicalFile();
    }

    int nSize = listResultRecordId.size();
    boolean bIsBigExport = (nSize > EXPORT_RECORD_STEP);

    // Encoding export
    String strEncoding = StringUtils.EMPTY;

    if (bIsCsvExport) {
        strEncoding = DirectoryParameterService.getService().getExportCSVEncoding();
    } else {
        strEncoding = DirectoryParameterService.getService().getExportXMLEncoding();
    }

    if (bIsBigExport) {
        try {
            String strPath = AppPathService.getWebAppPath()
                    + AppPropertiesService.getProperty(PROPERTY_PATH_TMP);
            java.io.File tmpDir = new java.io.File(strPath);
            tmpFile = java.io.File.createTempFile(EXPORT_TMPFILE_PREFIX, EXPORT_TMPFILE_SUFIX, tmpDir);
        } catch (IOException e) {
            AppLogService.error("Unable to create temp file in webapp tmp dir");

            try {
                tmpFile = java.io.File.createTempFile(EXPORT_TMPFILE_PREFIX, EXPORT_TMPFILE_SUFIX);
            } catch (IOException e1) {
                AppLogService.error(e1);
            }
        }

        try {
            tmpFile.deleteOnExit();
            outputStreamWriter = new OutputStreamWriter(new FileOutputStream(tmpFile), strEncoding);
            bufferedWriter = new BufferedWriter(outputStreamWriter);
        } catch (IOException e) {
            AppLogService.error(e);
        }
    }

    Plugin plugin = this.getPlugin();
    Locale locale = request.getLocale();

    // ---------------------------------------------------------------------
    StringBuffer strBufferListEntryXml = new StringBuffer();

    if (bDisplayDateCreation && bIsCsvExport) {
        Map<String, String> model = new HashMap<String, String>();
        model.put(Entry.ATTRIBUTE_ENTRY_ID, "0");
        XmlUtil.beginElement(strBufferListEntryXml, Entry.TAG_ENTRY, model);

        String strDateCreation = I18nService.getLocalizedString(PROPERTY_ENTRY_TYPE_DATE_CREATION_TITLE,
                locale);
        XmlUtil.addElementHtml(strBufferListEntryXml, Entry.TAG_TITLE, strDateCreation);
        XmlUtil.endElement(strBufferListEntryXml, Entry.TAG_ENTRY);
    }

    if (bDisplayDateModification && bIsCsvExport) {
        Map<String, String> model = new HashMap<String, String>();
        model.put(Entry.ATTRIBUTE_ENTRY_ID, "0");
        XmlUtil.beginElement(strBufferListEntryXml, Entry.TAG_ENTRY, model);

        String strDateModification = I18nService.getLocalizedString(PROPERTY_ENTRY_TYPE_DATE_MODIFICATION_TITLE,
                locale);
        XmlUtil.addElementHtml(strBufferListEntryXml, Entry.TAG_TITLE, strDateModification);
        XmlUtil.endElement(strBufferListEntryXml, Entry.TAG_ENTRY);
    }

    for (IEntry entry : listEntryResultSearch) {
        entry.getXml(plugin, locale, strBufferListEntryXml);
    }

    Map<String, String> model = new HashMap<String, String>();

    if ((directory.getIdWorkflow() != DirectoryUtils.CONSTANT_ID_NULL) && bWorkflowServiceEnable) {
        model.put(TAG_DISPLAY, TAG_YES);
    } else {
        model.put(TAG_DISPLAY, TAG_NO);
    }

    XmlUtil.addEmptyElement(strBufferListEntryXml, TAG_STATUS, model);

    StringBuilder strBufferDirectoryXml = new StringBuilder();
    strBufferDirectoryXml.append(XmlUtil.getXmlHeader());

    if (bIsBigExport) {
        strBufferDirectoryXml
                .append(directory.getXml(plugin, locale, new StringBuffer(), strBufferListEntryXml));

        strBufferListRecordXml = new StringBuffer(EXPORT_STRINGBUFFER_INITIAL_SIZE);

        strFileOutPut = xmlTransformerService.transformBySourceWithXslCache(strBufferDirectoryXml.toString(),
                physicalFile.getValue(), strXslId, null, null);

        String strFinalOutPut = null;

        if (!bIsCsvExport) {
            int pos = strFileOutPut.indexOf(EXPORT_XSL_EMPTY_LIST_RECORD);
            strFinalOutPut = strFileOutPut.substring(0, pos) + EXPORT_XSL_BEGIN_LIST_RECORD;
        } else {
            strFinalOutPut = strFileOutPut;
        }

        try {
            bufferedWriter.write(strFinalOutPut);
        } catch (IOException e) {
            AppLogService.error(e);
        }
    } else {
        strBufferListRecordXml = new StringBuffer();
    }

    // -----------------------------------------------------------------------
    List<Integer> nTmpListId = new ArrayList<Integer>();
    int idWorflow = directory.getIdWorkflow();
    IRecordService recordService = SpringContextService.getBean(RecordService.BEAN_SERVICE);

    if (bIsBigExport) {
        int nXmlHeaderLength = XmlUtil.getXmlHeader().length() - 1;
        int max = nSize / EXPORT_RECORD_STEP;
        // upper bound covering every full batch of EXPORT_RECORD_STEP records;
        // the remainder is handled after this loop
        int max1 = max * EXPORT_RECORD_STEP;

        for (int i = 0; i < max1; i += EXPORT_RECORD_STEP) {
            AppLogService.debug("Directory export progress : " + (((float) i / nSize) * 100) + "%");

            nTmpListId = new ArrayList<Integer>();

            int k = i + EXPORT_RECORD_STEP;

            for (int j = i; j < k; j++) {
                nTmpListId.add(listResultRecordId.get(j));
            }

            List<Record> nTmpListRecords = recordService.loadListByListId(nTmpListId, plugin);

            for (Record record : nTmpListRecords) {
                State state = workflowService.getState(record.getIdRecord(), Record.WORKFLOW_RESOURCE_TYPE,
                        idWorflow, Integer.valueOf(directory.getIdDirectory()));

                if (bIsCsvExport) {
                    strBufferListRecordXml.append(record.getXmlForCsvExport(plugin, locale, false, state,
                            listEntryResultSearch, false, false, true, bDisplayDateCreation,
                            bDisplayDateModification, hashFields));
                } else {
                    strBufferListRecordXml
                            .append(record.getXml(plugin, locale, false, state, listEntryResultSearch, false,
                                    false, true, bDisplayDateCreation, bDisplayDateModification, hashFields));
                }
            }

            strBufferListRecordXml = this.appendPartialContent(strBufferListRecordXml, bufferedWriter,
                    physicalFile, bIsCsvExport, strXslId, nXmlHeaderLength, xmlTransformerService);
        }

        // -----------------------------------------------------------------------
        int max2 = EXPORT_RECORD_STEP * max;
        nTmpListId = new ArrayList<Integer>();

        for (int i = max2; i < nSize; i++) {
            nTmpListId.add(listResultRecordId.get(i));
        }

        List<Record> nTmpListRecords = recordService.loadListByListId(nTmpListId, plugin);

        for (Record record : nTmpListRecords) {
            State state = workflowService.getState(record.getIdRecord(), Record.WORKFLOW_RESOURCE_TYPE,
                    idWorflow, Integer.valueOf(directory.getIdDirectory()));

            if (bIsCsvExport) {
                strBufferListRecordXml.append(
                        record.getXmlForCsvExport(plugin, locale, false, state, listEntryResultSearch, false,
                                false, true, bDisplayDateCreation, bDisplayDateModification, hashFields));
            } else {
                strBufferListRecordXml.append(record.getXml(plugin, locale, false, state, listEntryResultSearch,
                        false, false, true, bDisplayDateCreation, bDisplayDateModification, hashFields));
            }
        }

        strBufferListRecordXml = this.appendPartialContent(strBufferListRecordXml, bufferedWriter, physicalFile,
                bIsCsvExport, strXslId, nXmlHeaderLength, xmlTransformerService);

        strBufferListRecordXml.insert(0, EXPORT_XSL_BEGIN_PARTIAL_EXPORT);
        strBufferListRecordXml.insert(0, XmlUtil.getXmlHeader());
        strBufferListRecordXml.append(EXPORT_XSL_END_PARTIAL_EXPORT);
        strFileOutPut = xmlTransformerService.transformBySourceWithXslCache(strBufferListRecordXml.toString(),
                physicalFile.getValue(), strXslId, null, null);

        try {
            if (bIsCsvExport) {
                bufferedWriter.write(strFileOutPut);
            } else {
                bufferedWriter.write(strFileOutPut.substring(nXmlHeaderLength));
                bufferedWriter
                        .write(EXPORT_XSL_END_LIST_RECORD + EXPORT_XSL_NEW_LINE + EXPORT_XSL_END_DIRECTORY);
            }
        } catch (IOException e) {
            AppLogService.error(e);
        } finally {
            IOUtils.closeQuietly(bufferedWriter);
            IOUtils.closeQuietly(outputStreamWriter);
        }
    } else {
        List<Record> nTmpListRecords = recordService.loadListByListId(listResultRecordId, plugin);

        for (Record record : nTmpListRecords) {
            State state = workflowService.getState(record.getIdRecord(), Record.WORKFLOW_RESOURCE_TYPE,
                    idWorflow, Integer.valueOf(directory.getIdDirectory()));

            if (bIsCsvExport) {
                strBufferListRecordXml.append(
                        record.getXmlForCsvExport(plugin, locale, false, state, listEntryResultSearch, false,
                                false, true, bDisplayDateCreation, bDisplayDateModification, hashFields));
            } else {
                strBufferListRecordXml.append(record.getXml(plugin, locale, false, state, listEntryResultSearch,
                        false, false, true, bDisplayDateCreation, bDisplayDateModification, hashFields));
            }
        }

        strBufferDirectoryXml
                .append(directory.getXml(plugin, locale, strBufferListRecordXml, strBufferListEntryXml));
        strShotExportFinalOutPut = xmlTransformerService.transformBySourceWithXslCache(
                strBufferDirectoryXml.toString(), physicalFile.getValue(), strXslId, null, null);
    }

    // ----------------------------------------------------------------------- 
    DirectoryUtils.addHeaderResponse(request, response, strFileName);
    response.setCharacterEncoding(strEncoding);

    if (bIsCsvExport) {
        response.setContentType(CONSTANT_MIME_TYPE_CSV);
    } else {
        String strMimeType = FileSystemUtil.getMIMEType(strFileName);

        if (strMimeType != null) {
            response.setContentType(strMimeType);
        } else {
            response.setContentType(CONSTANT_MIME_TYPE_OCTETSTREAM);
        }
    }

    if (bIsBigExport) {
        FileChannel in = null;
        WritableByteChannel writeChannelOut = null;
        OutputStream out = null;

        try {
            in = new FileInputStream(tmpFile).getChannel();
            out = response.getOutputStream();
            writeChannelOut = Channels.newChannel(out);
            response.setContentLength(Long.valueOf(in.size()).intValue()); // truncates for exports over Integer.MAX_VALUE bytes
            in.transferTo(0, in.size(), writeChannelOut);
            response.getOutputStream().close();
        } catch (IOException e) {
            AppLogService.error(e);
        } finally {
            if (in != null) {
                try {
                    in.close();
                } catch (IOException e) {
                    AppLogService.error(e.getMessage(), e);
                }
            }

            IOUtils.closeQuietly(out);

            tmpFile.delete();
        }
    } else {
        PrintWriter out = null;

        try {
            out = response.getWriter();
            out.print(strShotExportFinalOutPut);
        } catch (IOException e) {
            AppLogService.error(e.getMessage(), e);
        } finally {
            if (out != null) {
                out.flush();
                out.close();
            }
        }
    }

    result.setNoop(true);

    return result;
}
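
The big-export branch above walks listResultRecordId in batches of EXPORT_RECORD_STEP so that the whole record set never has to sit in memory at once. The same batching pattern in isolation, as a sketch with generic names of our choosing:

import java.util.List;
import java.util.function.Consumer;

// Process full batches of `step` items, then the remainder, each item exactly once.
static <T> void processInBatches(List<T> items, int step, Consumer<List<T>> handler) {
    int size = items.size();
    int fullBatchesEnd = (size / step) * step;
    for (int i = 0; i < fullBatchesEnd; i += step) {
        handler.accept(items.subList(i, i + step));
    }
    if (fullBatchesEnd < size) {
        handler.accept(items.subList(fullBatchesEnd, size));
    }
}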

From source file:com.linkedin.databus.core.TestDbusEventBufferMult.java

@Test
public void testSinglePPartionStreamFromLatest() throws Exception {
    createBufMult();

    PhysicalPartition[] p = { _pConfigs[0].getPhysicalPartition() };

    //generate a bunch of windows for 3 partitions
    int windowsNum = 10;
    for (int i = 1; i <= windowsNum; ++i) {
        DbusEventBufferAppendable buf = _eventBufferMult.getDbusEventBufferAppendable(p[0]);

        buf.startEvents();
        byte[] schema = "abcdefghijklmnop".getBytes(Charset.defaultCharset());
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 100, (short) 0,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i, null);
    }
    String[] pnames = new String[p.length];
    int count = 0;
    for (PhysicalPartition ip : p) {
        pnames[count++] = ip.toSimpleString();
    }

    StatsCollectors<DbusEventsStatisticsCollector> statsColl = createStats(pnames);

    PhysicalPartitionKey[] pkeys = { new PhysicalPartitionKey(p[0]) };

    CheckpointMult cpMult = new CheckpointMult();
    Checkpoint cp = new Checkpoint();
    cp.setFlexible();
    cp.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
    cpMult.addCheckpoint(p[0], cp);

    DbusEventBufferBatchReadable reader = _eventBufferMult.getDbusEventBufferBatchReadable(cpMult,
            Arrays.asList(pkeys), statsColl);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    WritableByteChannel writeChannel = Channels.newChannel(baos);
    // Set streamFromLatestScn == true
    reader.streamEvents(true, 1000000, writeChannel, Encoding.BINARY, new AllowAllDbusFilter());
    writeChannel.close();
    baos.close();

    //make sure we got the physical partition names right
    List<String> ppartNames = statsColl.getStatsCollectorKeys();
    assertEquals(ppartNames.size(), 1);

    HashSet<String> expectedPPartNames = new HashSet<String>(Arrays.asList(p[0].toSimpleString()));
    for (String ppartName : ppartNames) {
        assertTrue(expectedPPartNames.contains(ppartName));
    }

    //verify event counts per partition
    DbusEventsTotalStats[] ppartStats = { statsColl.getStatsCollector(p[0].toSimpleString()).getTotalStats() };

    // Only the last window is returned in each of the partitions
    assertEquals(ppartStats[0].getNumDataEvents(), 1);
    assertEquals(ppartStats[0].getNumSysEvents(), 1);

    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumDataEvents(), (1));
    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumSysEvents(), (1));

    assertEquals(statsColl.getStatsCollector().getTotalStats().getMaxTimeLag(), ppartStats[0].getTimeLag());
    assertEquals(statsColl.getStatsCollector().getTotalStats().getMinTimeLag(), ppartStats[0].getTimeLag());
}

From source file:me.StevenLawson.TotalFreedomMod.TFM_Util.java

public static void downloadFile(String url, File output, boolean verbose) throws java.lang.Exception {
    final URL website = new URL(url);
    ReadableByteChannel rbc = Channels.newChannel(website.openStream());
    FileOutputStream fos = new FileOutputStream(output);
    fos.getChannel().transferFrom(rbc, 0, 1 << 24); // copies at most 16 MB (1 << 24 bytes)
    fos.close();

    if (verbose) {
        TFM_Log.info("Downloaded " + url + " to " + output.toString() + ".");
    }
}
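
Because the single transferFrom call caps the copy at 1 << 24 bytes, anything beyond 16 MB is silently dropped. A loop-until-done variant of the same copy (our sketch, reusing the method's fos and rbc variables):

// Loop until transferFrom reports no more bytes; for a stream-backed
// channel it returns 0 once the source reaches end-of-stream.
long position = 0;
long transferred;
while ((transferred = fos.getChannel().transferFrom(rbc, position, 1 << 24)) > 0) {
    position += transferred;
}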

From source file:de.digitalcollections.streaming.euphoria.controller.StreamingController.java

/**
 * Stream the given input to the given output via NIO {@link Channels} and a directly allocated NIO
 * {@link ByteBuffer}. Both the input and output streams will implicitly be closed after streaming, regardless of
 * whether an exception is thrown or not.
 *
 * @param input The input stream.
 * @param output The output stream.
 * @return The length of the written bytes.
 * @throws IOException When an I/O error occurs.
 */
private long stream(InputStream input, OutputStream output) throws IOException {
    try (ReadableByteChannel inputChannel = Channels.newChannel(input);
            WritableByteChannel outputChannel = Channels.newChannel(output)) {
        ByteBuffer buffer = ByteBuffer.allocateDirect(DEFAULT_STREAM_BUFFER_SIZE);
        long size = 0;

        while (inputChannel.read(buffer) != -1) {
            buffer.flip();
            size += outputChannel.write(buffer);
            buffer.clear();
        }

        return size;
    }
}
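
One subtlety in this loop: WritableByteChannel.write may perform a partial write on a non-blocking channel, and buffer.clear() would then discard the unwritten bytes. With blocking channels, the usual case for servlet I/O, a single write drains the buffer. A defensive variant of the loop (our sketch, same variables as the method above):

// Keep writing until the buffer is fully drained before clearing it,
// so a partial write never loses data.
while (inputChannel.read(buffer) != -1) {
    buffer.flip();
    while (buffer.hasRemaining()) {
        size += outputChannel.write(buffer);
    }
    buffer.clear();
}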

From source file:squash.deployment.lambdas.ApiGatewayCustomResourceLambda.java

void constructApiAndUploadSdk(String restApiId, AmazonApiGateway apiGatewayClient, String region,
        String validDatesGETLambdaURI, String bookingsGETLambdaURI, String bookingsPUTDELETELambdaURI,
        String bookingRulesGETLambdaURI, String bookingRuleOrExclusionPUTDELETELambdaURI,
        String bookingsApiGatewayInvocationRole, String stageName, LambdaLogger logger) throws Exception {
    // Create the API's resources
    logger.log("Creating API resources");
    String validDates = createTopLevelResourceOnApi("validdates", restApiId, apiGatewayClient, logger).getId();
    String bookings = createTopLevelResourceOnApi("bookings", restApiId, apiGatewayClient, logger).getId();
    String bookingRules = createTopLevelResourceOnApi("bookingrules", restApiId, apiGatewayClient, logger)
            .getId();
    String reservationForm = createTopLevelResourceOnApi("reservationform", restApiId, apiGatewayClient, logger)
            .getId();
    String cancellationForm = createTopLevelResourceOnApi("cancellationform", restApiId, apiGatewayClient,
            logger).getId();

    // Create the API's methods
    logger.log("Creating API methods");
    Map<String, String> extraParameters = new HashMap<>();

    String revvingSuffix = System.getenv("RevvingSuffix");

    // Methods on the validdates resource
    logger.log("Creating methods on validdates resource");
    extraParameters.put("ValidDatesGETLambdaURI", validDatesGETLambdaURI);
    extraParameters.put("BookingsGETLambdaURI", bookingsGETLambdaURI);
    extraParameters.put("BookingsPUTDELETELambdaURI", bookingsPUTDELETELambdaURI);
    extraParameters.put("BookingRulesGETLambdaURI", bookingRulesGETLambdaURI);
    extraParameters.put("BookingRulesPUTDELETELambdaURI", bookingRuleOrExclusionPUTDELETELambdaURI);
    extraParameters.put("BookingsApiGatewayInvocationRole", bookingsApiGatewayInvocationRole);
    createMethodOnResource("ValidDatesGET", validDates, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);
    createMethodOnResource("ValidDatesOPTIONS", validDates, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);

    // Methods on the bookings resource
    logger.log("Creating methods on bookings resource");
    createMethodOnResource("BookingsGET", bookings, restApiId, extraParameters, apiGatewayClient, revvingSuffix,
            region, logger);
    createMethodOnResource("BookingsDELETE", bookings, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);
    createMethodOnResource("BookingsPUT", bookings, restApiId, extraParameters, apiGatewayClient, revvingSuffix,
            region, logger);
    createMethodOnResource("BookingsPOST", bookings, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);
    createMethodOnResource("BookingsOPTIONS", bookings, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);

    // Methods on the bookingrules resource
    logger.log("Creating methods on bookingrules resource");
    createMethodOnResource("BookingrulesGET", bookingRules, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);
    createMethodOnResource("BookingrulesDELETE", bookingRules, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);
    createMethodOnResource("BookingrulesPUT", bookingRules, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);
    createMethodOnResource("BookingrulesOPTIONS", bookingRules, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);

    // Methods on the reservationform resource
    logger.log("Creating methods on reservationform resource");
    createMethodOnResource("ReservationformGET", reservationForm, restApiId, extraParameters, apiGatewayClient,
            revvingSuffix, region, logger);
    createMethodOnResource("ReservationformOPTIONS", reservationForm, restApiId, extraParameters,
            apiGatewayClient, revvingSuffix, region, logger);

    // Methods on the cancellationform resource
    logger.log("Creating methods on cancellationform resource");
    createMethodOnResource("CancellationformGET", cancellationForm, restApiId, extraParameters,
            apiGatewayClient, revvingSuffix, region, logger);
    createMethodOnResource("CancellationformOPTIONS", cancellationForm, restApiId, extraParameters,
            apiGatewayClient, revvingSuffix, region, logger);

    // Deploy the api to a stage (with default throttling settings)
    logger.log("Deploying API to stage: " + stageName);
    CreateDeploymentRequest createDeploymentRequest = new CreateDeploymentRequest();
    createDeploymentRequest.setCacheClusterEnabled(false);
    createDeploymentRequest.setDescription("A deployment of the Squash api");
    createDeploymentRequest.setStageDescription("A stage for the Squash api");
    createDeploymentRequest.setStageName(stageName);
    createDeploymentRequest.setRestApiId(restApiId);
    CreateDeploymentResult createDeploymentResult = apiGatewayClient.createDeployment(createDeploymentRequest);
    logger.log("Deployed to stage with ID: " + createDeploymentResult.getId());

    // FIXME
    // Throttle all methods on this stage - does not seem to work yet?
    // logger.log("Throttling all of stage's methods");
    // GetStagesRequest getStagesRequest = new GetStagesRequest();
    // getStagesRequest.setRestApiId(restApiId);
    // GetStagesResult getStagesResult =
    // apiGatewayClient.getStages(getStagesRequest);
    // List<Stage> stages = getStagesResult.getItem();
    // Stage stage = stages.stream().filter(s ->
    // s.getStageName().equals(stageName)).findFirst().get();
    // MethodSetting methodSetting = new MethodSetting();
    // methodSetting.setThrottlingBurstLimit(10);
    // methodSetting.setThrottlingRateLimit(1.0);
    // stage.addMethodSettingsEntry("*/*", methodSetting); // Adds to all
    // methods
    // logger.log("Throttling completed");

    // Download javascript sdk and upload it to the S3 bucket serving the
    // squash site
    logger.log("Downloading Javascript SDK");
    GetSdkRequest getSdkRequest = new GetSdkRequest();
    getSdkRequest.setRestApiId(restApiId);
    getSdkRequest.setStageName(stageName);
    getSdkRequest.setSdkType("JavaScript");
    // This is for Android sdks but it crashes if the map is empty - so set
    // to something
    Map<String, String> paramsMap = new HashMap<>();
    paramsMap.put("GroupID", "Dummy");
    getSdkRequest.setParameters(paramsMap);
    GetSdkResult getSdkResult = apiGatewayClient.getSdk(getSdkRequest);

    // Copy the sdk to S3 via AWS lambda's temporary file system
    logger.log("Copying Javascript SDK to S3");
    try {
        logger.log("Saving SDK to lambda's temporary file system");
        ByteBuffer sdkBuffer = getSdkResult.getBody().asReadOnlyBuffer();
        try (FileOutputStream fileOutputStream = new FileOutputStream("/tmp/sdk.zip")) {
            try (WritableByteChannel channel = Channels.newChannel(fileOutputStream)) {
                channel.write(sdkBuffer);
            }
        }
        // Unzip the sdk
        logger.log("SDK saved. Now unzipping");
        String outputFolder = "/tmp/extractedSdk";
        ZipFile zipFile = new ZipFile("/tmp/sdk.zip");
        try {
            Enumeration<? extends ZipEntry> entries = zipFile.entries();
            while (entries.hasMoreElements()) {
                ZipEntry entry = entries.nextElement();
                logger.log("Unzipping next entry: " + entry.getName());
                File entryDestination = new File(outputFolder, entry.getName());
                if (entry.isDirectory()) {
                    entryDestination.mkdirs();
                } else {
                    entryDestination.getParentFile().mkdirs();
                    InputStream in = zipFile.getInputStream(entry);
                    OutputStream out = new FileOutputStream(entryDestination);
                    IOUtils.copy(in, out);
                    IOUtils.closeQuietly(in);
                    out.close();
                }
            }
        } finally {
            zipFile.close();
        }
        logger.log("SDK unzipped.");

        // GZIP all the sdk files individually
        logger.log("Gzip-ing sdk files to enable serving gzip-ed from S3");
        FileUtils.gzip(Arrays.asList(new File(outputFolder)), Collections.emptyList(), logger);
        logger.log("Gzip-ed sdk files to enable serving gzip-ed from S3");

        // Rev the files by appending revving-suffix to names - for cache-ing
        File sdkFolder = new File("/tmp/extractedSdk/apiGateway-js-sdk");
        FileUtils.appendRevvingSuffix(revvingSuffix, sdkFolder.toPath(), logger);

        // Upload the sdk from the temporary filesystem to S3.
        logger.log("Uploading unzipped Javascript SDK to S3 bucket: " + squashWebsiteBucket);
        TransferUtils.waitForS3Transfer(TransferManagerBuilder.defaultTransferManager()
                .uploadDirectory(squashWebsiteBucket, "", sdkFolder, true), logger);
        logger.log("Uploaded sdk successfully to S3");

        // Add gzip content-encoding metadata to zip-ed files
        logger.log("Updating gzip metadata on Javascript SDK in S3 bucket");
        TransferUtils.addGzipContentEncodingMetadata(squashWebsiteBucket, Optional.empty(), logger);
        logger.log("Updated gzip metadata on Javascript SDK in S3 bucket");

        // Add cache-control metadata to zip-ed files. js files will have
        // 1-year cache validity, since they are rev-ved.
        logger.log("Updating cache-control metadata on Javascript SDK in S3 bucket");
        TransferUtils.addCacheControlHeader("max-age=31536000", squashWebsiteBucket, Optional.empty(), ".js",
                logger);
        logger.log("Updated cache-control metadata on Javascript SDK in S3 bucket");

        logger.log("Setting public read permission on uploaded sdk");
        TransferUtils.setPublicReadPermissionsOnBucket(squashWebsiteBucket, Optional.empty(), logger);
        logger.log("Finished setting public read permissions on uploaded sdk");
    } catch (Exception e) {
        logger.log("Exception caught whilst copying Javascript SDK to S3: " + e.getMessage());
        throw e;
    }
}
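
An aside on the unzip loop above: it trusts entry.getName() verbatim, so a crafted archive containing "../" entries could write outside /tmp/extractedSdk (the classic zip-slip problem). A common guard, sketched here as our addition rather than the project's code, would run before each entry is written:

// Zip-slip guard: reject any entry whose resolved destination
// escapes the intended output folder.
File destDir = new File(outputFolder);
File entryDestination = new File(destDir, entry.getName());
if (!entryDestination.getCanonicalPath().startsWith(destDir.getCanonicalPath() + File.separator)) {
    throw new IOException("Zip entry outside target directory: " + entry.getName());
}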

From source file:org.alfresco.repo.transfer.HttpClientTransmitterImpl.java

/**
 *
 */
public void getTransferReport(Transfer transfer, OutputStream result) {
    TransferTarget target = transfer.getTransferTarget();
    PostMethod getReportRequest = getPostMethod();
    try {
        HostConfiguration hostConfig = getHostConfig(target);
        HttpState httpState = getHttpState(target);

        try {
            getReportRequest.setPath(target.getEndpointPath() + "/report");

            //Put the transferId on the query string
            getReportRequest.setQueryString(
                    new NameValuePair[] { new NameValuePair("transferId", transfer.getTransferId()) });

            int responseStatus = httpClient.executeMethod(hostConfig, getReportRequest, httpState);
            checkResponseStatus("getReport", responseStatus, getReportRequest);

            InputStream is = getReportRequest.getResponseBodyAsStream();

            // Now copy the response input stream to result.
            final ReadableByteChannel inputChannel = Channels.newChannel(is);
            final WritableByteChannel outputChannel = Channels.newChannel(result);
            try {
                // copy the channels
                channelCopy(inputChannel, outputChannel);
            } finally {
                // closing the channels
                inputChannel.close();
                outputChannel.close();
            }

            return;
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            String error = "Failed to execute HTTP request to target";
            log.debug(error, e);
            throw new TransferException(MSG_HTTP_REQUEST_FAILED,
                    new Object[] { "getTransferReport", target.toString(), e.toString() }, e);
        }
    } finally {
        getReportRequest.releaseConnection();
    }
}
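
The channelCopy helper called above is not part of this excerpt. A plausible implementation (an assumption on our part, not the actual Alfresco source) pumps a buffer between the two channels until end-of-stream:

// Hypothetical channelCopy: drain the input channel into the output channel.
private static void channelCopy(ReadableByteChannel in, WritableByteChannel out) throws IOException {
    ByteBuffer buffer = ByteBuffer.allocateDirect(16 * 1024);
    while (in.read(buffer) != -1) {
        buffer.flip();
        while (buffer.hasRemaining()) {
            out.write(buffer);
        }
        buffer.clear();
    }
}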

From source file:org.craftercms.studio.impl.v1.repository.git.GitContentRepositoryHelper.java

public boolean writeFile(Repository repo, String site, String path, InputStream content) {
    boolean result = true;

    try {
        // Create basic file
        File file = new File(repo.getDirectory().getParent(), path);

        // Create parent folders
        File folder = file.getParentFile();
        if (folder != null) {
            if (!folder.exists()) {
                folder.mkdirs();
            }
        }

        // Create the file if it doesn't exist already
        if (!file.exists()) {
            try {
                if (!file.createNewFile()) {
                    logger.error("error creating file: site: " + site + " path: " + path);
                    result = false;
                }
            } catch (IOException e) {
                logger.error("error creating file: site: " + site + " path: " + path, e);
                result = false;
            }
        }

        if (result) {
            // Write the bits
            try (FileChannel outChannel = new FileOutputStream(file.getPath()).getChannel()) {
                logger.debug("created the file output channel");
                ReadableByteChannel inChannel = Channels.newChannel(content);
                logger.debug("created the file input channel");
                long amount = 1024 * 1024; // 1MB at a time
                long count;
                long offset = 0;
                while ((count = outChannel.transferFrom(inChannel, offset, amount)) > 0) {
                    logger.debug("writing the bits: offset = " + offset + " count: " + count);
                    offset += count;
                }
            }

            // Add the file to git
            try (Git git = new Git(repo)) {
                git.add().addFilepattern(getGitPath(path)).call();

                result = true;
            } catch (GitAPIException e) {
                logger.error("error adding file to git: site: " + site + " path: " + path, e);
                result = false;
            }
        }
    } catch (IOException e) {
        logger.error("error writing file: site: " + site + " path: " + path, e);
        result = false;
    }

    return result;
}

From source file:com.linkedin.databus.core.TestDbusEventBufferMult.java

@Test
public void testMultiPPartionStreamFromLatest() throws Exception {
    createBufMult();

    PhysicalPartition[] p = { _pConfigs[0].getPhysicalPartition(), _pConfigs[1].getPhysicalPartition(),
            _pConfigs[2].getPhysicalPartition() };

    //generate a bunch of windows for 3 partitions
    int windowsNum = 10;
    for (int i = 1; i <= windowsNum; ++i) {
        DbusEventBufferAppendable buf = _eventBufferMult.getDbusEventBufferAppendable(p[0]);

        buf.startEvents();
        byte[] schema = "abcdefghijklmnop".getBytes(Charset.defaultCharset());
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 100, (short) 0,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i, null);

        buf = _eventBufferMult.getDbusEventBufferAppendable(p[1]);
        buf.startEvents();
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(2), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i + 1, null);

        buf = _eventBufferMult.getDbusEventBufferAppendable(p[2]);
        buf.startEvents();
        assertTrue(buf.appendEvent(new DbusEventKey(1), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[100], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(2), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        assertTrue(buf.appendEvent(new DbusEventKey(3), (short) 101, (short) 2,
                System.currentTimeMillis() * 1000000, (short) 2, schema, new byte[10], false, null));
        buf.endEvents(100 * i + 2, null);
    }
    String[] pnames = new String[p.length];
    int count = 0;
    for (PhysicalPartition ip : p) {
        pnames[count++] = ip.toSimpleString();
    }

    StatsCollectors<DbusEventsStatisticsCollector> statsColl = createStats(pnames);

    PhysicalPartitionKey[] pkeys = { new PhysicalPartitionKey(p[0]), new PhysicalPartitionKey(p[1]),
            new PhysicalPartitionKey(p[2]) };

    CheckpointMult cpMult = new CheckpointMult();
    for (int i = 0; i < 3; ++i) {
        Checkpoint cp = new Checkpoint();
        cp.setFlexible();
        cp.setConsumptionMode(DbusClientMode.ONLINE_CONSUMPTION);
        cpMult.addCheckpoint(p[i], cp);
    }

    DbusEventBufferBatchReadable reader = _eventBufferMult.getDbusEventBufferBatchReadable(cpMult,
            Arrays.asList(pkeys), statsColl);

    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    WritableByteChannel writeChannel = Channels.newChannel(baos);
    // Set streamFromLatestScn == true
    reader.streamEvents(true, 1000000, writeChannel, Encoding.BINARY, new AllowAllDbusFilter());
    writeChannel.close();
    baos.close();

    //make sure we got the physical partition names right
    List<String> ppartNames = statsColl.getStatsCollectorKeys();
    assertEquals(ppartNames.size(), 3);

    HashSet<String> expectedPPartNames = new HashSet<String>(
            Arrays.asList(p[0].toSimpleString(), p[1].toSimpleString(), p[2].toSimpleString()));
    for (String ppartName : ppartNames) {
        assertTrue(expectedPPartNames.contains(ppartName));
    }

    //verify event counts per partition
    DbusEventsTotalStats[] ppartStats = { statsColl.getStatsCollector(p[0].toSimpleString()).getTotalStats(),
            statsColl.getStatsCollector(p[1].toSimpleString()).getTotalStats(),
            statsColl.getStatsCollector(p[2].toSimpleString()).getTotalStats() };

    // Only the last window is returned in each of the partitions
    assertEquals(ppartStats[0].getNumDataEvents(), 1);
    assertEquals(ppartStats[1].getNumDataEvents(), 2);
    assertEquals(ppartStats[2].getNumDataEvents(), 3);
    assertEquals(ppartStats[0].getNumSysEvents(), 1);
    assertEquals(ppartStats[1].getNumSysEvents(), 1);
    assertEquals(ppartStats[2].getNumSysEvents(), 1);

    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumDataEvents(), (1 + 2 + 3));
    assertEquals(statsColl.getStatsCollector().getTotalStats().getNumSysEvents(), (1 + 1 + 1));

    assertEquals(statsColl.getStatsCollector().getTotalStats().getMaxTimeLag(), Math
            .max(ppartStats[0].getTimeLag(), Math.max(ppartStats[1].getTimeLag(), ppartStats[2].getTimeLag())));
    assertEquals(statsColl.getStatsCollector().getTotalStats().getMinTimeLag(), Math
            .min(ppartStats[0].getTimeLag(), Math.min(ppartStats[1].getTimeLag(), ppartStats[2].getTimeLag())));
}

From source file:org.bytesoft.openjtcc.supports.logger.DbTransactionLoggerImpl.java

private byte[] streamToByteArray(InputStream input) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ReadableByteChannel in = null;
    WritableByteChannel out = null;
    ByteBuffer buffer = ByteBuffer.allocate(1024);
    try {
        in = Channels.newChannel(input);
        out = Channels.newChannel(baos);
        while (in.read(buffer) != -1) {
            buffer.flip();
            out.write(buffer);
            buffer.clear();
        }
    } catch (IOException ex) {
        // ignore
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                // ignore
            }
        }
        // baos is never null here, and closing a ByteArrayOutputStream
        // is a no-op; it is closed only for symmetry
        try {
            baos.close();
        } catch (IOException e) {
            // ignore
        }
    }
    return baos.toByteArray();
}
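
Because the catch block swallows IOException, a failed read silently returns whatever bytes were copied before the error. A variant that propagates the error and lets try-with-resources handle the closing (our sketch, not the original project's code):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;
import java.nio.channels.WritableByteChannel;

private static byte[] streamToByteArray(InputStream input) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    ByteBuffer buffer = ByteBuffer.allocate(1024);

    // Closing the output channel also closes baos, which is a no-op
    // for ByteArrayOutputStream.
    try (ReadableByteChannel in = Channels.newChannel(input);
            WritableByteChannel out = Channels.newChannel(baos)) {
        while (in.read(buffer) != -1) {
            buffer.flip();
            out.write(buffer);
            buffer.clear();
        }
    }
    return baos.toByteArray();
}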