Example usage for java.io BufferedWriter flush

List of usage examples for java.io BufferedWriter flush

Introduction

This page collects example usages of java.io.BufferedWriter.flush() from open-source projects.

Prototype

public void flush() throws IOException 

Document

Flushes the stream, writing any buffered output characters through to the underlying writer.
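
A minimal, self-contained sketch of the typical pattern (the file name "example.txt" is only an illustration): flush() pushes any buffered characters through to the underlying writer without closing it, which is useful when output must become visible on disk before the writer is closed.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class FlushExample {
    public static void main(String[] args) throws IOException {
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("example.txt"))) {
            writer.write("first line");
            writer.newLine();
            // Force the buffered characters out to the file now,
            // rather than waiting for close() to do it.
            writer.flush();
        }
    }
}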

Usage

From source file:net.grinder.SingleConsole.java

private void writeReportData(String name, String value) {
    try {
        BufferedWriter bw = fileWriterMap.get(name);
        if (bw == null) {
            bw = new BufferedWriter(new FileWriter(new File(this.reportPath, name), true));
            fileWriterMap.put(name, bw);
        }
        bw.write(value);
        bw.newLine();
        bw.flush();
    } catch (Exception e) {
        LOGGER.error(e.getMessage(), e);
        throw processException(e);
    }
}

From source file:gtu._work.ui.SqlCreaterUI.java

private void firstRowMakeInsertSqlBtn(ActionEvent evt) {
    try {
        String tableName = Validate.notBlank(tableNameText.getText(), "table name must not be blank");
        File srcFile = JCommonUtil.filePathCheck(excelFilePathText2.getText(), "Excel file", "xlsx");
        File saveFile = JCommonUtil._jFileChooser_selectFileOnly_saveFile();
        if (saveFile == null) {
            JCommonUtil._jOptionPane_showMessageDialog_error("no output file selected");
            return;
        }

        BufferedWriter writer = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(saveFile), "utf8"));

        BufferedInputStream bis = new BufferedInputStream(new FileInputStream(srcFile));
        XSSFWorkbook xssfWorkbook = new XSSFWorkbook(bis);
        Sheet sheet = xssfWorkbook.getSheetAt(0);

        LinkedHashMap<String, String> valueMap = new LinkedHashMap<String, String>();
        for (int ii = 0; ii < sheet.getRow(0).getLastCellNum(); ii++) {
            valueMap.put(formatCellType(sheet.getRow(0).getCell(ii)), "");
        }

        for (int j = 0; j < sheet.getPhysicalNumberOfRows(); j++) {
            Row row = sheet.getRow(j);
            LinkedHashMap<String, String> valueMap2 = (LinkedHashMap<String, String>) valueMap.clone();
            int ii = 0;
            for (String key : valueMap2.keySet()) {
                valueMap2.put(key, formatCellType(row.getCell(ii)));
                ii++;
            }
            appendLog("" + valueMap2);
            String insertSql = this.fetchInsertSQL(tableName, valueMap2);
            appendLog("" + insertSql);
            writer.write(insertSql);
            writer.newLine();
        }
        bis.close();

        writer.flush();
        writer.close();

        JCommonUtil._jOptionPane_showMessageDialog_info("saved to : \n" + saveFile);
    } catch (Exception ex) {
        JCommonUtil.handleException(ex);
    }
}

From source file:org.apache.playframework.generator.mybatisplus.AutoGenerator.java

/**
 * Controller
 *
 * @param beanName
 * @param controllerName
 * @throws IOException
 */
protected void buildController(String beanName, String controllerName) throws IOException {
    File serviceFile = new File(PATH_CONTROLLER_IMPL, controllerName + ".java");
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(serviceFile), "utf-8"));
    bw.write("package " + config.getControllerPackage() + ";");
    bw.newLine();
    bw.newLine();
    bw.write("import org.springframework.stereotype.Controller;");
    bw.newLine();

    bw = buildClassComment(bw, beanName + " ");
    bw.newLine();
    bw.write("@Controller");
    bw.newLine();
    bw.write("public class " + controllerName + " {");
    bw.newLine();
    bw.newLine();
    bw.newLine();
    bw.write("}");
    bw.flush();
    bw.close();
}

From source file:com.searchtechnologies.aspire.components.heritrixconnector.HeritrixScanner.java

/**
 * Find all URLs that were not accessed and evaluate which ones should be deleted
 * @param info
 * @param job
 * @throws AspireException
 * @throws IOException 
 */
private void deleteAfterCrawl(HeritrixSourceInfo info) throws AspireException, IOException {

    if (info.getTempUncrawledDB() == null) {
        info.setTempUncrawledDB(info.openTempDB());
    }

    info.getTempUncrawledDB().clear();
    info.commitUrlDB();

    if (HeritrixSourceInfo.INITIAL_CRAWL_COMPLETE
            .equals(info.getIncrementalDB().get("||status||").split(",")[0])) {

        /* Iterates over all entries in the database */
        Iterator<Entry<String, String>> iter = info.getIncrementalDB().entrySet().iterator();
        // Writes uncrawled URLs to files grouped by host name
        HashMap<String, BufferedWriter> files = new HashMap<String, BufferedWriter>();
        long commitDB = 0;
        // Scan through ALL URLs inside of JDBM2 (SCAN_UNCRAWLED LOOP)
        while (iter.hasNext() && info.getStatus() != HeritrixSourceInfo.SCAN_STOPPED) {
            Entry<String, String> entry = iter.next();
            String url = entry.getKey();
            String data = entry.getValue();
            DataBaseURLEntry value = null;

            if (!"||status||".equals(url)) {
                if (data != null)
                    value = DataBaseURLEntry.createDataBaseURLEntryFromString(data);

                /* We only need entries that were not accessed during the current crawl;
                   guard against a null value before dereferencing it */
                if (value != null
                        && info.getStartCrawlTime().getTime() - value.getLastAccessedTime().getTime() > 0) {
                    if (url != null && info.getTempUncrawledDB().get(url) == null) {
                        info.getTempUncrawledDB().put(url, data);

                        commitDB++;
                        if (commitDB % 25 == 0) {
                            info.commitUrlDB();
                        }

                        //Add it to the respective hostname file
                        String hostname = new URL(StringUtilities.safeUrl(url)).getHost();
                        if (!files.containsKey(hostname)) {
                            File file = new File(info.getUrlDir() + "/urlsToDelete_" + hostname + ".urls");
                            file.getParentFile().mkdirs();
                            if (file.exists()) {
                                file.delete();
                            }
                            files.put(hostname, new BufferedWriter(new FileWriter(file)));
                        }
                        files.get(hostname).write(url + " " + entry.getValue() + "\n");

                    }
                }
            }
            if (info.getStatus() == HeritrixSourceInfo.SCAN_PAUSED) {
                info.commitUrlDB();
            }
            while (info.getStatus() == HeritrixSourceInfo.SCAN_PAUSED)
                ;
        }
        info.getIncrementalDB().put("||status||",
                HeritrixSourceInfo.TEMP_UNCRAWLED_DB_CREATED + "," + info.getStartCrawlTime().getTime());
        info.commitUrlDB();
        for (BufferedWriter bw : files.values()) {
            bw.flush();
            bw.close();
        }

        //Fill the hashmap of hostnames-Status
        try {
            for (String hostname : files.keySet())
                scanUncrawledUrls(info, hostname);
        } catch (IOException ioe) {
            error(ioe, "Error scanning uncrawled urls file");
            info.setScannerErrorMessage(ioe, "Error scanning uncrawled urls file");
            throw new AspireException(
                    "com.searchtechnologies.aspire.components.heritrixconnector.HeritrixScanner", ioe,
                    "Error scanning uncrawled urls file");
        }

        info.getPriorityQueueChecker().start(info.getScanJob(), this);

        long lastChange = new Date().getTime();
        int lastCount = info.getTempUncrawledDB().size();
        while (info.getPriorityQueueChecker().isRunning()) {
            try {
                Thread.sleep(500);
                if (new Date().getTime() - lastChange >= 2000) {
                    try {
                        for (String hostname : files.keySet())
                            scanUncrawledUrls(info, hostname);
                    } catch (IOException ioe) {
                        error(ioe, "Error scanning uncrawled urls file");
                        info.setScannerErrorMessage(ioe, "Error scanning uncrawled urls file");
                        throw new AspireException(
                                "com.searchtechnologies.aspire.components.heritrixconnector.HeritrixScanner",
                                ioe, "Error scanning uncrawled urls file");
                    }
                }
                if (lastCount != info.getTempUncrawledDB().size()) {
                    lastChange = new Date().getTime();
                }
            } catch (InterruptedException e) {
            }
        }

    }

    if (info.getStatus() == HeritrixSourceInfo.SCAN_PAUSED) {
        info.commitUrlDB();
    } else if (info.getStatus() == HeritrixSourceInfo.SCAN_STOPPED) {
        info.getTempUncrawledDB().clear();
        info.commitUrlDB();
    } else if (HeritrixSourceInfo.TEMP_UNCRAWLED_DB_CREATED
            .equals(info.getIncrementalDB().get("||status||").split(",")[0])) {
        info.commitUrlDB();
    }

}

From source file:cloud.elasticity.elastman.Sensor.java

@Override
public void run() {

    String filename;
    if (controlMode) {
        filename = "control.dat";
    } else {
        filename = "ident.dat";
    }

    // Open the data file
    FileWriter fstream;
    BufferedWriter out = null;
    try {
        fstream = new FileWriter(filename);
        out = new BufferedWriter(fstream);
    } catch (IOException e1) {
        // TODO Auto-generated catch block
        e1.printStackTrace();
    }

    // Write a header  
    try {
        out.write("T \t" + "PeriodSec \t" + "Clients \t" + "Servers \t" + "TotalOps \t" + "Throughput \t"
                + "ThroPerServ \t" +

                "tpsR \t" + "meanR \t" + "stddivR \t" + "minR \t" + "p95R \t" + "p99R \t" + "fp99R \t"
                + "maxR \t" +

                "tpSM \t" + "meanM \t" + "stddivM \t" + "minM \t" + "p95M \t" + "p99M \t" + "maxM \t" + "ntp \t"
                + "nfp99 \t" + "opID \t" + "log\n");

        out.flush();
    } catch (IOException e) {
        log.error(e.getMessage());
    }

    boolean firstInput = true;
    double lastTps = 0;
    boolean bigTPChange = false;

    while (identifying) {
        if (warmup > 0) {
            warmup--;
        } else if (warmup == 0) {
            warmup--;
            //            lastTimeSec = System.nanoTime() / 1000000000; // for the controller  // NOT USED 
        }

        long start = System.nanoTime();

        //// sleep for sampling time then collect data
        try {
            Thread.sleep(period * 1000);
        } catch (InterruptedException e) {
            log.error(e.getMessage());
        }
        timeStep++;
        // loop and fetch data from each YCSB client
        updateMonitoringData();
        long end = System.nanoTime();
        long pInSec = (end - start) / 1000000000; // sampling period in seconds

        final double throughput = total_op.getSum() / pInSec;

        // Throughput per server
        final double tps = throughput / cluster.getActiveVoldVMsCount();
        // Read Throughput per server
        final double rtps = read_op.getSum() / pInSec / cluster.getActiveVoldVMsCount();
        // Write Throughput per server
        final double mtps = mixed_op.getSum() / pInSec / cluster.getActiveVoldVMsCount();

        // calculate a smoothed value of the p99 as well
        filter.step(read_p99.getMean());

        if (firstInput) {
            lastTps = tps;
            firstInput = false;
        }

        log.debug("Summary: " + timeStep + " \t" + pInSec + " \t" + (clientSockets.size() - deadSockets.size())
                + " \t" + cluster.getActiveVoldVMsCount() + " \t" + total_op.getSum() + " \t"
                + (long) (throughput) + " \t" + (long) (throughput / cluster.getActiveVoldVMsCount()) + " \t"
                + rtps + " \t" + (long) read_mean.getMean() + " \t" + (long) read_stddiv.getMean() + " \t"
                + (long) read_min.getMean() + " \t" + (long) read_p95.getMean() + " \t"
                + (long) read_p99.getMean() + " \t" + (long) filter.getValue() + " \t"
                + (long) read_max.getMean() + " \t" + mtps + " \t" + (long) mixed_mean.getMean() + " \t"
                + (long) mixed_stddiv.getMean() + " \t" + (long) mixed_min.getMean() + " \t"
                + (long) mixed_p95.getMean() + " \t" + (long) mixed_p99.getMean() + " \t"
                + (long) mixed_max.getMean() + " \t"
                + (long) ((throughput / cluster.getActiveVoldVMsCount()) - outOp) + " \t"
                + (long) (filter.getValue() - inOp));

        try {
            out.write("" + timeStep + " \t" + pInSec + " \t" + (clientSockets.size() - deadSockets.size())
                    + " \t" + cluster.getActiveVoldVMsCount() + " \t" + total_op.getSum() + " \t"
                    + (long) (throughput) + " \t" + (long) (throughput / cluster.getActiveVoldVMsCount())
                    + " \t" + (long) rtps + " \t" + (long) read_mean.getMean() + " \t"
                    + (long) read_stddiv.getMean() + " \t" + (long) read_min.getMean() + " \t"
                    + (long) read_p95.getMean() + " \t" + (long) read_p99.getMean() + " \t"
                    + (long) filter.getValue() + " \t" + (long) read_max.getMean() + " \t" + (long) mtps + " \t"
                    + (long) mixed_mean.getMean() + " \t" + (long) mixed_stddiv.getMean() + " \t"
                    + (long) mixed_min.getMean() + " \t" + (long) mixed_p95.getMean() + " \t"
                    + (long) mixed_p99.getMean() + " \t" + (long) mixed_max.getMean() + " \t"
                    + (long) ((throughput / cluster.getActiveVoldVMsCount()) - outOp) + " \t"
                    + (long) (filter.getValue() - inOp) + " \t");
            if (!controlMode) {
                out.write("-1 \tIdent\n");
                out.flush();
            } // else -> later append control log and flush

        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        System.out.println("======================");

        //clear stats
        read_op.clear();
        read_mean.clear();
        read_stddiv.clear();
        read_min.clear();
        read_p95.clear();
        read_p99.clear();
        read_max.clear();

        mixed_op.clear();
        mixed_mean.clear();
        mixed_stddiv.clear();
        mixed_min.clear();
        mixed_p95.clear();
        mixed_p99.clear();
        mixed_max.clear();

        total_op.clear();

        // remove dead clients
        if (deadSockets.size() > 0) {
            clientSockets.removeAll(deadSockets);
            deadSockets.clear();
            System.out.println("Removind Dead Sockets!");
        }
        if (!controlMode && clientSockets.size() == 0) {
            identifying = false; // finished the identification
            System.out.println("Identification completed");
        }
        if (warmup == 0) { // next time the controller will be started!! so initialize;
            pid.reset();
            filter.reset(); // to remove any noise in startup
        }

        if (controlMode && warmup >= 0) {
            try {
                out.write("0 \tWarmup\n");
                out.flush();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        } else if (controlMode && warmup < 0) {

            if (!isRebalancing() && Math.abs(lastTps - tps) > ffThroughputDelta) {
                bigTPChange = true;
                System.out.println("Big Throughput Change: " + (lastTps - tps));
            }

            // 0 - check
            cluster.updateVMs();
            if (actuator.isCreateVMs() && cluster.getActiveVoldVMsCount() != cluster.getVoldVMsCount()) { // then there is something wrong (e.g., didn't finish removing nodes)
                System.out.println("Vold Count Error!!"); // Should never happen unless someone adds VoldVMs externally
                pid.reset();
                filter.reset();
                try {
                    out.write("3 \tRebalanceNotComplete!\n");
                    out.flush();
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
            // TODO: 1 - Error is very large for first time then do nothing
            //            else if (firstLargeValue && !(Math.abs(lastTps-tps)>ffThroughputDelta))  {   // this is probably noise, Ignore it
            //               // do nothing
            ////               pidReset(filter);
            //               System.out.println("Controller: Very large value for first time! Do nothing!");
            //               try {
            //                  out.write("4 \tFirstLarge\n");
            //                  out.flush();
            //               } catch (IOException e) {
            //                  // TODO Auto-generated catch block
            //                  e.printStackTrace();
            //               }
            //            }
            // 2 - if in dead zone then do nothing
            else if (inOp - 2 * dead <= filter.getValue() && filter.getValue() <= inOp + dead) {
                System.out.println("Controller: in dead zone! Do nothing!");
                pid.reset();
                filter.reset();
                try {
                    out.write("0 \tDeadZone\n");
                    out.flush();
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
            // 3 - Rebalancing
            else if (isRebalancing()) {
                System.out.println("Controller: Rebalancing! Do nothing!");
                //               pidReset(filter);
                try {
                    out.write("3 \tRebalancing\n");
                    out.flush();
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
                // FIXME: now I give rebalance 2 period to finish.
                // Should check the real status and update rebalance accordingly
            }
            // 3.5 - if current latency is less than desired and min servers is 3 then do nothing.
            else if (cluster.getActiveVoldVMsCount() <= 3 && filter.getValue() <= inOp + dead) { // should never be < 3
                System.out.println(
                        "Controller: Having min=3 Vold VMs and the response time is OK! Not running controller");
                pid.reset();
                filter.reset();
                try {
                    out.write("0 \tMinVMs\n");
                    out.flush();
                } catch (IOException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            } // 4 - 
            else {
                boolean usePID = true, ffFail = false;

                if (timeStep > nextFF && (bigTPChange || (filter.getValue() > (inOp + (inOp * 0.5))
                        || filter.getValue() < (inOp - (inOp * 0.5)))) /*Big change in load use ff*/) {//(filter>(inOp + (inOp*0.5)) || filter<(inOp - (inOp*0.5)))) {
                    usePID = false;
                    bigTPChange = false;
                    //   use binary classifier
                    nextFF = timeStep + 4; // TODO: Fix nextFF
                    System.out.println("Controller: Using FF");
                    double output = ff.classify(rtps, mtps);
                    // calculate number of servers needed to handle current throughput
                    double n = (throughput / output) - cluster.getActiveVoldVMsCount();

                    // TODO: Now I get ceil. Check if there is a better solution
                    int nn = 0;
                    //                  if(n>0) {
                    nn = (int) Math.ceil(n);
                    //                  } else {
                    //                     nn=(int)Math.floor(n);
                    //                  }

                    //int nn = (int)Math.round(n);

                    System.out.println(
                            "Controller: FF output = " + output + " that is " + n + " -> " + nn + " servers");

                    if ((filter.getValue() > (inOp + inOp * 0.5) && nn < 3)
                            || (filter.getValue() < (inOp - inOp * 0.5) && nn > -3)) {//Math.abs(nn)<3) {
                        // Very large error & add/rem few VMs! Must be outside of op region
                        // Fall back to FB
                        usePID = true;
                        ffFail = true;
                    } else {

                        try {
                            out.write("2 \tFF#" + output + "#" + n + "#" + nn + "\n");
                            out.flush();
                            pid.reset();
                            filter.reset();
                        } catch (IOException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                        if (nn > 0 || (nn < 0 && cluster.getActiveVoldVMsCount() > 3)) {
                            actuator.scheduleRebalance(nn, false);

                        }
                    }
                }
                if (usePID) { // 4 - use PID
                    System.out.println("Controller: Using FB");
                    double delta = pid.step(filter.getValue()); // pid gives throughput per server

                    double output = tps + delta; // this is the new throughput per server
                    if (output < 50) {
                        output = 50;
                        System.err.println("WARNING!!! pid gave negative/small output!!");
                    }

                    // calculate number of servers needed to handle new throughput
                    double n = (throughput / output) - cluster.getActiveVoldVMsCount();

                    // TODO: Now I ceil. Check if there is a better solution
                    int nn = 0;
                    //                  if(n>0) {
                    nn = (int) Math.ceil(n);
                    //                  } else {
                    //                     nn=(int)Math.floor(n);
                    //                  }// int nn = (int)Math.round(n);

                    System.out.println(
                            "Controller: PID output = " + output + " that is " + n + " -> " + nn + " servers");

                    try {
                        out.write("1 \tFB#" + output + "#" + n + "#" + nn);
                        if (ffFail) {
                            out.write("#FFFail");
                        }
                        out.write("\n");
                        out.flush();
                    } catch (IOException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    if (nn > 0 || (nn < 0 && cluster.getActiveVoldVMsCount() > 3)) {
                        actuator.scheduleRebalance(nn, true);
                    }
                }
            }
        }

        lastTps = tps;
    }
    try {
        out.close();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

From source file:edu.isi.pfindr.learn.util.PairsFileIO.java

public void readDistinctElementsFromPairsAddClass(String pairsFilepath) {
    //readDistinctElementsIntoList
    List<Object> distinctElements = readDistinctElementsIntoList(pairsFilepath);
    System.out.println("Size of distinctElements" + distinctElements.size());
    for (int i = 0; i < distinctElements.size(); i++) {
        System.out.println("distinctElements " + i + " " + distinctElements.get(i));
    }

    //get class for those distinct elements from original cohort file
    String originalFile = "data/cohort1/bio_nlp/cohort1_s.txt";
    BufferedReader br = null;
    String thisLine;
    String[] lineArray;
    LinkedMap originalMap = new LinkedMap();
    BufferedWriter distinctPriorityPairsWriter = null;

    try {
        br = new BufferedReader(new FileReader(originalFile));
        while ((thisLine = br.readLine()) != null) {
            thisLine = thisLine.trim();
            if (thisLine.equals(""))
                continue;

            lineArray = thisLine.split("\t");
            originalMap.put(lineArray[3], lineArray[1]);
        }

        //write distinct elements with class to an output file
        StringBuffer outfileBuffer = new StringBuffer();
        for (int i = 0; i < distinctElements.size(); i++)
            outfileBuffer.append(distinctElements.get(i)).append("\t")
                    .append(originalMap.get(distinctElements.get(i)) + "\n");

        distinctPriorityPairsWriter = new BufferedWriter(
                new FileWriter(pairsFilepath.split("\\.")[0] + "_distinct_with_class.txt"));

        distinctPriorityPairsWriter.append(outfileBuffer.toString());
        outfileBuffer.setLength(0);
        distinctPriorityPairsWriter.flush();

    } catch (IOException io) {
        io.printStackTrace();
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Close both the reader and the writer regardless of success or failure
        try {
            if (br != null)
                br.close();
            if (distinctPriorityPairsWriter != null)
                distinctPriorityPairsWriter.close();
        } catch (IOException e) {
            System.out.println("Problem occurred while closing streams");
            e.printStackTrace();
        }
    }

}

From source file:com.qcadoo.plugins.qcadooExport.internal.ExportToCsvController.java

@Monitorable(threshold = 500)
@ResponseBody
@RequestMapping(value = { CONTROLLER_PATH }, method = RequestMethod.POST)
public Object generateCsv(@PathVariable(PLUGIN_IDENTIFIER_VARIABLE) final String pluginIdentifier,
        @PathVariable(VIEW_NAME_VARIABLE) final String viewName, @RequestBody final JSONObject body,
        final Locale locale) {
    try {
        changeMaxResults(body);

        ViewDefinitionState state = crudService.invokeEvent(pluginIdentifier, viewName, body, locale);

        GridComponent grid = (GridComponent) state.getComponentByReference("grid");

        String date = DateFormat.getDateInstance().format(new Date());
        File file = fileService.createExportFile("export_" + grid.getName() + "_" + date + ".csv");

        BufferedWriter output = null;

        try {
            output = new BufferedWriter(
                    new OutputStreamWriter(new FileOutputStream(file), Charset.forName("UTF-8")));

            boolean firstName = true;

            for (String name : grid.getColumnNames().values()) {
                if (firstName) {
                    firstName = false;
                } else {
                    output.append(EXPORTED_DOCUMENT_SEPARATOR);
                }
                output.append("\"").append(normalizeString(name)).append("\"");
            }

            output.append("\n");

            List<Map<String, String>> rows;
            if (grid.getSelectedEntitiesIds().isEmpty()) {
                rows = grid.getColumnValuesOfAllRecords();
            } else {
                rows = grid.getColumnValuesOfSelectedRecords();
            }

            for (Map<String, String> row : rows) {
                boolean firstValue = true;
                for (String value : row.values()) {
                    if (firstValue) {
                        firstValue = false;
                    } else {
                        output.append(EXPORTED_DOCUMENT_SEPARATOR);
                    }
                    output.append("\"").append(normalizeString(value)).append("\"");
                }

                output.append("\n");
            }

            output.flush();
        } catch (IOException e) {
            throw new IllegalStateException(e.getMessage(), e);
        } finally {
            IOUtils.closeQuietly(output);
        }

        state.redirectTo(fileService.getUrl(file.getAbsolutePath()) + "?clean", true, false);

        return crudService.renderView(state);
    } catch (JSONException e) {
        throw new IllegalStateException(e.getMessage(), e);
    }
}

From source file:com.orange.oidc.secproxy_service.HttpOpenidConnect.java

public String doRedirect(String urlRedirect) {
    // android.os.Debug.waitForDebugger();
    try {

        Log.d(TAG, "mOcp.m_redirect_uri=" + mOcp.m_redirect_uri);

        Log.d(TAG, "urlRedirect=" + urlRedirect);

        // with server phpOIDC, check for '#'
        if ((urlRedirect.startsWith(mOcp.m_redirect_uri + "?"))
                || (urlRedirect.startsWith(mOcp.m_redirect_uri + "#"))) {
            Log.d(TAG, "doRedirect : in check");

            String[] params = urlRedirect.substring(mOcp.m_redirect_uri.length() + 1).split("&");
            String code = "";
            String state = "";
            String state_key = "state";
            for (int i = 0; i < params.length; i++) {
                String param = params[i];
                int idxEqual = param.indexOf('=');
                if (idxEqual >= 0) {
                    String key = param.substring(0, idxEqual);
                    String value = param.substring(idxEqual + 1);
                    if (key.startsWith("code"))
                        code = value;
                    if (key.startsWith("state"))
                        state = value;
                    if (key.startsWith("session_state")) {
                        state = value;
                        state_key = "session_state";
                    }
                }
            }

            // display code and state
            Logd(TAG, "doRedirect => code: " + code + " / state: " + state);

            // doRepost(code,state);
            if (code.length() > 0) {

                // get token_endpoint endpoint
                String token_endpoint = getEndpointFromConfigOidc("token_endpoint", mOcp.m_server_url);

                Log.d(TAG, "token_endpoint=" + token_endpoint);

                if (isEmpty(token_endpoint)) {
                    Logd(TAG, "logout : could not get token_endpoint on server : " + mOcp.m_server_url);
                    return null;
                }

                List<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>(2);
                HttpURLConnection huc = getHUC(token_endpoint);
                huc.setInstanceFollowRedirects(false);

                if (mUsePrivateKeyJWT) {
                    nameValuePairs.add(new BasicNameValuePair("client_assertion_type",
                            "urn:ietf:params:oauth:client-assertion-type:jwt-bearer"));
                    String client_assertion = secureProxy.getPrivateKeyJwt(token_endpoint);
                    Logd(TAG, "client_assertion: " + client_assertion);
                    nameValuePairs.add(new BasicNameValuePair("client_assertion", client_assertion));
                } else {
                    huc.setRequestProperty("Authorization", "Basic " + secureProxy.getClientSecretBasic());
                }

                huc.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");

                huc.setDoOutput(true);
                huc.setChunkedStreamingMode(0);

                OutputStream os = huc.getOutputStream();
                OutputStreamWriter out = new OutputStreamWriter(os, "UTF-8");
                BufferedWriter writer = new BufferedWriter(out);

                nameValuePairs.add(new BasicNameValuePair("grant_type", "authorization_code"));
                Logd(TAG, "code: " + code);
                nameValuePairs.add(new BasicNameValuePair("code", code));
                nameValuePairs.add(new BasicNameValuePair("redirect_uri", mOcp.m_redirect_uri));
                Logd(TAG, "redirect_uri" + mOcp.m_redirect_uri);
                if (state != null && state.length() > 0)
                    nameValuePairs.add(new BasicNameValuePair(state_key, state));

                // write URL encoded string from list of key value pairs
                writer.write(getQuery(nameValuePairs));
                writer.flush();
                writer.close();
                out.close();
                os.close();

                Logd(TAG, "doRedirect => before connect");
                Logd(TAG, "huc=" + huc.toString());
                huc.connect();
                Logd(TAG, "huc2=" + huc.getContentEncoding());
                int responseCode = huc.getResponseCode();
                System.out.println("2 - code " + responseCode);
                Log.d(TAG, "doRedirect => responseCode " + responseCode);
                InputStream in = null;
                try {
                    in = new BufferedInputStream(huc.getInputStream());
                } catch (IOException ioe) {
                    sysout("io exception: " + huc.getErrorStream());
                }
                if (in != null) {
                    String result = convertStreamToString(in);
                    // now you have the string representation of the HTML request
                    in.close();

                    Logd(TAG, "doRedirect: " + result);

                    // save as static for now
                    return result;

                } else {
                    Logd(TAG, "doRedirect null");
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return null;
}

From source file:com.roquahacks.semafor4j.FrameNetService.java

public void writeOptionsToConfig() {
    try {/*from w  w  w  .ja  v a  2  s  .  co m*/
        this.configFile = FileUtils.readLines(new File(FrameNetOptions.ABS_PATH_FILE_CONFIG));
        BufferedWriter bw = new BufferedWriter(new FileWriter(new File(FrameNetOptions.ABS_PATH_FILE_CONFIG)));
        for (String s : configFile) {
            if (s.startsWith(SEMAFOR_HOME)) {
                s = SEMAFOR_HOME + FrameNetOptions.ABS_PATH_SEMAFOR;
            } else if (s.startsWith(MST_MODE)) {
                if (this.fnOpt.isServerModeOn()) {
                    s = MST_MODE + "server";
                } else {
                    s = MST_MODE + "noserver";
                }
            } else if (s.startsWith(JAVA_HOME)) {
                s = JAVA_HOME + this.fnOpt.getJavaHomePath();
            } else if (s.startsWith(GOLD_TARGET_FILE)) {
                s = GOLD_TARGET_FILE + this.fnOpt.getGoldTargetsPath();
            } else if (s.startsWith(AUTO_TARGET_ID_MODE)) {
                if (this.fnOpt.isAutoTargetIDStrictModeOn()) {
                    s = AUTO_TARGET_ID_MODE + "strict";
                } else {
                    s = AUTO_TARGET_ID_MODE + "relaxed";
                }
            } else if (s.startsWith(USE_GRAPH_FILES)) {
                if (this.fnOpt.isGraphFilesOn()) {
                    s = USE_GRAPH_FILES + "yes";
                } else {
                    s = USE_GRAPH_FILES + "no";
                }
            } else if (s.startsWith(DECODING_TYPE)) {
                s = DECODING_TYPE + this.fnOpt.getDecodingType();
            }
            bw.write(s);
            bw.newLine();
        }
        bw.flush();
        bw.close();
        this.configFile.clear();
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:org.shareok.data.kernel.api.services.ServiceUtil.java

public static String executeCommandLineTask(String taskId, String taskType, String data)
        throws InvalidCommandLineArgumentsException, JSONException, IOException {

    BufferedWriter loggingForUserFileInfoFileWr = null;
    String message;
    String startDate;
    String endDate;
    String outputFilePath = null;
    Integer compare;

    try {
        loggingForUserFileInfoFileWr = new BufferedWriter(
                new FileWriter(DataHandlersUtil.getLoggingForUserFilePath(taskId, taskType), true));
        if (DocumentProcessorUtil.isEmptyString(taskId)) {
            message = "The data argument is null or empty task ID!";
            throw new InvalidCommandLineArgumentsException(message);
        }
        if (DocumentProcessorUtil.isEmptyString(taskType)) {
            message = "The data argument does not specify task type!";
            throw new InvalidCommandLineArgumentsException(message);
        }
        if (DocumentProcessorUtil.isEmptyString(data)) {
            message = "The data provided for execution of the task is empty!\n";
            loggingForUserFileInfoFileWr.write(message);
            throw new InvalidCommandLineArgumentsException(message);
        }

        DataHandlersUtil.CURRENT_TASK_TYPE = taskType;
        JSONObject dataObj = new JSONObject(data);

        switch (taskType) {
        case "journal-search":
            String publisher = dataObj.getString("publisher");
            DataHandlersUtil.CURRENT_TASK_ID = taskId;
            startDate = dataObj.getString("startDate");
            endDate = dataObj.getString("endDate");
            String affiliate = dataObj.getString("affiliate");
            loggingForUserFileInfoFileWr.write("Task information:\n");
            loggingForUserFileInfoFileWr.write("Search publications at " + publisher + " between " + startDate
                    + " and " + endDate + " by authors at " + affiliate + ".\n");
            loggingForUserFileInfoFileWr.flush();

            if (DocumentProcessorUtil.isEmptyString(publisher) || DocumentProcessorUtil.isEmptyString(taskId)
                    || DocumentProcessorUtil.isEmptyString(startDate)
                    || DocumentProcessorUtil.isEmptyString(endDate)
                    || DocumentProcessorUtil.isEmptyString(affiliate)) {
                message = "Cannot get specific information such as publisher, start date, end date, and/or author affiliation to execute the task.\n";
                loggingForUserFileInfoFileWr.write(message);
                throw new InvalidCommandLineArgumentsException(message);
            }
            compare = DataHandlersUtil.datesCompare(startDate, endDate);
            if (compare == null) {
                message = "Cannot parse the start date or the end date!\n";
                loggingForUserFileInfoFileWr.write(message);
                throw new InvalidCommandLineArgumentsException(message);
            } else if (compare > 0) {
                message = "The start date is later than the end date!\n";
                loggingForUserFileInfoFileWr.write(message);
                throw new InvalidCommandLineArgumentsException(message);
            }
            try {
                DspaceJournalDataService serviceObj = ServiceUtil
                        .getDspaceJournalDataServInstanceByPublisher(publisher);
                if (null == serviceObj) {
                    loggingForUserFileInfoFileWr
                            .write("The program has internal error, please contact relative personnel.\n");
                    logger.error("Cannot get the service bean from task type: " + taskType);
                    return null;
                }
                String articlesData = serviceObj.getApiResponseByDatesAffiliate(startDate, endDate, affiliate);
                if (!DocumentProcessorUtil.isEmptyString(articlesData)) {
                    articlesData = articlesData.replace("", "'");
                    outputFilePath = DataHandlersUtil.getTaskFileFolderPath(taskId, taskType) + File.separator
                            + startDate + "_" + endDate + ".json";
                    File outputFile = new File(outputFilePath);
                    if (!outputFile.exists()) {
                        outputFile.createNewFile();
                    }
                    DocumentProcessorUtil.outputStringToFile(articlesData, outputFilePath);
                    System.out.println("article data = " + articlesData);
                    loggingForUserFileInfoFileWr
                            .write("The journal search task has been completed sucessfully.\n");
                } else {
                    loggingForUserFileInfoFileWr
                            .write("The program has internal error, please contact relative personnel.\n");
                    System.out.println(
                            "The " + taskType + " task id=" + taskId + " cannot retrieve the article data!");
                }
            } catch (Exception ex) {
                loggingForUserFileInfoFileWr
                        .write("The program has internal error, please contact relative personnel.\n");
                logger.error("Cannot complete the " + taskType + " with id=" + taskId, ex);
            }
            //                articlesData = articlesData.replaceAll("'", "\\\\\\'");                
            break;
        case "journal-saf":
            String[] dois;
            DataHandlersUtil.CURRENT_TASK_ID = taskId;
            startDate = dataObj.getString("startDate");
            endDate = dataObj.getString("endDate");
            dois = dataObj.getString("dois").split(";");
            if (DocumentProcessorUtil.isEmptyString(startDate) || DocumentProcessorUtil.isEmptyString(endDate)
                    || null == dois || dois.length == 0) {
                message = "Cannot get specific information such as publication DOI, start date, and/or end date to execute the task.\n";
                loggingForUserFileInfoFileWr.write(message);
                throw new InvalidCommandLineArgumentsException(message);
            }
            loggingForUserFileInfoFileWr.write("Task information:\n");
            loggingForUserFileInfoFileWr
                    .write("Generate DSpace SAF packge for publications with DOIs in " + Arrays.toString(dois)
                            + " which are published between " + startDate + " and " + endDate + ".\n");
            loggingForUserFileInfoFileWr.flush();

            compare = DataHandlersUtil.datesCompare(startDate, endDate);
            if (compare == null) {
                message = "Cannot parse the start date or the end date!\n";
                loggingForUserFileInfoFileWr.write(message);
                throw new InvalidCommandLineArgumentsException(message);
            } else if (compare > 0) {
                message = "The start date is later than the end date!\n";
                loggingForUserFileInfoFileWr.write(message);
                throw new InvalidCommandLineArgumentsException(message);
            }

            try {
                outputFilePath = ServiceUtil.generateDspaceSafPackagesByDois(dois, startDate, endDate);
                if (outputFilePath.startsWith("error")) {
                    message = "The DOI provided is not valid: " + outputFilePath + "\n";
                    System.out.println(message);
                    loggingForUserFileInfoFileWr.write(message);
                    throw new ErrorDspaceApiResponseException(message);
                } else if (null == outputFilePath) {
                    loggingForUserFileInfoFileWr
                            .write("The program has internal error, please contact relative personnel.\n");
                    throw new ErrorDspaceApiResponseException("Cannot get null saf path!");
                } else {
                    loggingForUserFileInfoFileWr.write("safPath=" + outputFilePath + "\n");
                    loggingForUserFileInfoFileWr.write("The SAF package has been prepared sucessfully.\n");
                    System.out.println("The SAF package has been stored at path=" + outputFilePath);
                }
            } catch (Exception ex) {
                logger.error(ex);
                loggingForUserFileInfoFileWr
                        .write("The program has internal error, please contact relative personnel.\n");
                ex.printStackTrace();
            }
            break;
        case "journal-import":
            try {
                DataHandlersUtil.CURRENT_TASK_ID = taskId;
                outputFilePath = ShareokdataManager.getDspaceCommandLineTaskOutputPath() + "_"
                        + DataHandlersUtil.getCurrentTimeString() + "_" + taskType + "_" + taskId + ".txt";
                String safPath = dataObj.getString("safPath");
                String collectionHandle = dataObj.getString("collectionHandle");
                String dspaceApiUrl = dataObj.getString("dspaceApiUrl");
                if (DocumentProcessorUtil.isEmptyString(safPath)
                        || DocumentProcessorUtil.isEmptyString(collectionHandle)
                        || DocumentProcessorUtil.isEmptyString(dspaceApiUrl)) {
                    message = "Cannot get specific information such as SAF package path, collection handle, and/or DSpace REST API url to execute the task.\n";
                    loggingForUserFileInfoFileWr.write(message);
                    throw new InvalidCommandLineArgumentsException(message);
                }
                loggingForUserFileInfoFileWr.write("Task information:\n");
                loggingForUserFileInfoFileWr.write("Import DSpace SAF packge into collection: "
                        + collectionHandle + " with DSPace REST API URL=" + dspaceApiUrl + "\n");
                loggingForUserFileInfoFileWr.flush();

                DspaceRestServiceImpl ds = (DspaceRestServiceImpl) getDataService("rest-import-dspace");
                ds.getHandler().setReportFilePath(
                        DataHandlersUtil.getJobReportPath("cli-import-dspace-" + taskType, taskId)
                                + File.separator + taskId + "-report.txt");
                ds.loadItemsFromSafPackage(safPath, collectionHandle, dspaceApiUrl);
            } catch (Exception ex) {
                logger.error(ex);
                ex.printStackTrace();
            }
            break;
        case "saf-build":
            try {
                loggingForUserFileInfoFileWr.write("Task information:\n");
                String csvPath = dataObj.getString("csvPath"); // The full path of the csv file
                String extension = DocumentProcessorUtil.getFileExtension(csvPath);
                if (null == extension || !extension.contains("csv")) {
                    throw new NonCsvFileException("The uploaded file is not a CSV file!");
                }
                loggingForUserFileInfoFileWr
                        .write("Generate a SAF package with metadata file at " + csvPath + ".\n");
                loggingForUserFileInfoFileWr.write("Start generating...\n");
                SAFPackage safPackageInstance = new SAFPackage();
                safPackageInstance.processMetaPack(csvPath, true);
                String csvDirectoryPath = DocumentProcessorUtil.getFileContainerPath(csvPath);
                File csv = new File(csvPath);
                File safPackage = new File(csvDirectoryPath + File.separator + "SimpleArchiveFormat.zip");
                File newPackage = null;
                if (safPackage.exists()) {
                    newPackage = new File(csvDirectoryPath + File.separator
                            + DocumentProcessorUtil.getFileNameWithoutExtension(csv.getName()) + ".zip");
                    if (!newPackage.exists()) {
                        safPackage.renameTo(newPackage);
                    } else {
                        throw new FileAlreadyExistsException("The zip file of the SAF package already exists!");
                    }
                }
                File safPackageFolder = new File(csvDirectoryPath + File.separator + "SimpleArchiveFormat");
                if (safPackageFolder.exists()) {
                    FileUtils.deleteDirectory(safPackageFolder);
                }
                if (null != newPackage) {
                    outputFilePath = newPackage.getAbsolutePath();
                    loggingForUserFileInfoFileWr
                            .write("The new SAF package path is: \nsafPath=[\"" + outputFilePath + "\"]\n");
                    return outputFilePath;
                } else {
                    loggingForUserFileInfoFileWr.write("The new SAF package generation failed.\n");
                    return null;
                }

            } catch (IOException | FileAlreadyExistsException | NonCsvFileException ex) {
                logger.error(ex.getMessage());
                loggingForUserFileInfoFileWr.write("Error:" + ex.getMessage() + "\n");
                loggingForUserFileInfoFileWr.flush();
            }
            break;
        default:
            throw new InvalidCommandLineArgumentsException("The command line task type is not valid!");
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    } finally {
        if (null != loggingForUserFileInfoFileWr) {
            try {
                loggingForUserFileInfoFileWr.flush();
                loggingForUserFileInfoFileWr.close();
            } catch (IOException ex) {
                ex.printStackTrace();
            }
        }
    }
    return outputFilePath;
}