Example usage for java.io BufferedWriter flush

Introduction

This page collects real-world usage examples of java.io.BufferedWriter.flush().

Prototype

public void flush() throws IOException 

Document

Flushes the stream, writing any buffered characters through to the underlying writer.
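
Before the project examples below, here is a minimal, self-contained sketch of the basic pattern (the file name and strings are illustrative only). flush() forces characters held in the BufferedWriter's buffer through to the underlying writer; close() also flushes, so an explicit flush() matters mainly when the writer must stay open afterwards, for example when streaming over a socket or into a process's stdin.

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.IOException;

public class FlushExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources closes (and therefore flushes) the writer automatically
        try (BufferedWriter writer = new BufferedWriter(new FileWriter("example.txt"))) {
            writer.write("first chunk");
            // Push the buffered characters to the FileWriter now, without
            // closing the stream, so the data reaches the file at this point.
            writer.flush();
            writer.write("second chunk");
        } // close() flushes whatever is still buffered
    }
}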

Usage

From source file:jahspotify.web.api.HistoryController.java

public void serializeHistoryCursor(Collection<TrackHistory> historyCursor,
        HttpServletResponse httpServletResponse) {
    try {
        final ServletOutputStream httpOutputStream = httpServletResponse.getOutputStream();
        final BufferedWriter outputStream = new BufferedWriter(new OutputStreamWriter(httpOutputStream));
        outputStream.write("{");
        outputStream.write("\"count\":");
        outputStream.write("" + historyCursor.size());

        if (historyCursor.size() > 0) {
            Gson gson = new Gson();

            outputStream.write(",");
            outputStream.write("\"tracks\":[");

            for (Iterator<TrackHistory> iterator = historyCursor.iterator(); iterator.hasNext();) {
                TrackHistory next = iterator.next();
                outputStream.write(gson.toJson(toWebTrack(next)));
                if (iterator.hasNext()) {
                    outputStream.write(",");
                }
                outputStream.flush();
            }

            outputStream.write("]");
        }
        outputStream.write("}");
        outputStream.flush();
        outputStream.close();
        httpOutputStream.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
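
Note the placement of flush() in this example: flushing after each track pushes the partially built JSON to the client as it is produced, so a large history streams incrementally instead of accumulating in the writer's buffer. Closing the BufferedWriter also closes the wrapped ServletOutputStream, so the separate httpOutputStream.close() is redundant but harmless.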

From source file:coolmap.application.io.internal.cmatrix.DefaultCMatrixExporter.java

@Override
public void dumpData(CMatrix matrix, TFile zipFolder) throws Exception {
    TFile outputFile = new TFile(zipFolder.getAbsolutePath() + File.separator + IOTerm.FILE_DATA);
    TFile propertyFile = new TFile(zipFolder.getAbsolutePath() + File.separator + IOTerm.FILE_PROPERTY);

    BufferedWriter propertyWriter = new BufferedWriter(
            new OutputStreamWriter(new TFileOutputStream(propertyFile)));

    JSONObject cmatrixPropertyEntry = new JSONObject();
    cmatrixPropertyEntry.put(IOTerm.ATTR_ID, matrix.getID());
    cmatrixPropertyEntry.put(IOTerm.ATTR_NAME, matrix.getName());
    cmatrixPropertyEntry.put(IOTerm.ATTR_CMATRIX_NUMROW, matrix.getNumRows());
    cmatrixPropertyEntry.put(IOTerm.ATTR_CMATRIX_NUMCOLUMN, matrix.getNumColumns());
    cmatrixPropertyEntry.put(IOTerm.ATTR_CLASS, matrix.getClass().getName());
    cmatrixPropertyEntry.put(IOTerm.ATTR_CMATRIX_MEMBERCLASS, matrix.getMemberClass().getName());

    //        System.out.println(cmatrixPropertyEntry);

    propertyWriter.write(cmatrixPropertyEntry.toString());

    propertyWriter.flush();
    propertyWriter.close();

    BufferedWriter dataWriter = new BufferedWriter(new OutputStreamWriter(new TFileOutputStream(outputFile)));

    //        dataWriter.write("This is where cmatrix will be dumped");
    //        part of the following code can be extracted for other use
    //        dataWriter properties
    dataWriter.write("Row/Column");
    for (int i = 0; i < matrix.getNumColumns(); i++) {
        dataWriter.write("\t");
        String colLabelString = matrix.getColLabel(i);
        if (colLabelString == null)
            colLabelString = "";
        dataWriter.write(colLabelString);
    }
    dataWriter.write("\n");

    for (int i = 0; i < matrix.getNumRows(); i++) {

        String rowLabelString = matrix.getRowLabel(i);
        if (rowLabelString == null)
            rowLabelString = "";
        dataWriter.write(rowLabelString);

        for (int j = 0; j < matrix.getNumColumns(); j++) {
            dataWriter.write("\t");
            Object value = matrix.getValue(i, j);

            if (value != null) {
                dataWriter.write(value.toString());
            }
        }
        dataWriter.write("\n");

    }

    dataWriter.flush();
    dataWriter.close();

    //        System.out.println("Dumping successful");
}

From source file:com.syncedsynapse.kore2.jsonrpc.HostConnection.java

/**
 * Send a TCP request
 * @param socket Socket to write to
 * @param request Request to send
 * @throws ApiException
 */
private void sendTcpRequest(Socket socket, String request) throws ApiException {
    try {
        LogUtils.LOGD(TAG, "Sending request via TCP: " + request);
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(socket.getOutputStream()));
        writer.write(request);
        writer.flush();
    } catch (Exception e) {
        LogUtils.LOGW(TAG, "Failed to send TCP request.", e);
        disconnect();
        throw new ApiException(ApiException.IO_EXCEPTION_WHILE_SENDING_REQUEST, e);
    }
}
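
Here write() only copies the request into the BufferedWriter's internal buffer; it is the flush() that actually pushes the bytes onto the TCP connection, and without it a request smaller than the buffer might never leave the process. The writer is deliberately not closed, since closing it would also close the socket's output stream.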

From source file:gov.nih.nci.nbia.StandaloneDMDispatcher.java

private void install(String downloadUrl) {
    Double vNum = 0.0;
    if (appVersion != null) {
        vNum = Double.parseDouble(appVersion);
    }
    String installerPath = getInstallerName(downloadUrl);
    if (os.contains("windows")) {
        try {
            Runtime.getRuntime().exec("msiexec /i \"" + installerPath + "\"");
        } catch (Exception e) {
            e.printStackTrace();
        }
    } else if (os.startsWith("mac")) {
        try {
            Runtime.getRuntime().exec(new String[] { "/usr/bin/open", installerPath });
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    } else {
        JLabel pwLabel = new JLabel("Sudo Password");
        JTextField password = new JPasswordField();
        Object[] objs = { pwLabel, password };
        int result = JOptionPane.showConfirmDialog(null, objs, "Please enter a sudo password",
                JOptionPane.OK_CANCEL_OPTION);
        String pas = null;
        if (result == JOptionPane.OK_OPTION) {
            pas = password.getText();
        }

        if (pas != null) {
            if (os.equals("CentOS")) {
                // sudo yum install TCIADownloader-1.0-1.x86_64.rpm
                try {
                    String upgradCmd = "/usr/bin/sudo -S yum -q -y remove TCIADownloader.x86_64;/usr/bin/sudo -S yum -y -q install ";
                    if (vNum >= 3.2)
                        upgradCmd = "/usr/bin/sudo -S yum -q -y remove NBIADataRetriever.x86_64;/usr/bin/sudo -S yum -y -q install ";

                    String[] cmd = { "/bin/bash", "-c", upgradCmd + installerPath };

                    Process pb = Runtime.getRuntime().exec(cmd);
                    BufferedWriter writer = null;
                    writer = new BufferedWriter(new OutputStreamWriter(pb.getOutputStream()));
                    writer.write(pas);
                    writer.write('\n');
                    writer.flush();

                    String status = null;

                    if (pb.waitFor() == 0) {
                        status = "successfully";
                    } else {
                        status = "unsuccessfully";
                    }

                    JOptionPane.showMessageDialog(null,
                            "Installation of new version of NBIA Data Retriever is completed " + status + ".");
                } catch (IOException | InterruptedException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            } else if (os.equals("Ubuntu")) {
                // sudo dpkg -i tciadownloader_1.0-2_amd64.deb
                String upgradCmd = "/usr/bin/sudo -S dpkg -i ";
                if (vNum >= 3.2)
                    upgradCmd = "/usr/bin/sudo -S dpkg -i nbia-data-retriever; /usr/bin/sudo -S dpkg -i ";
                try {
                    String[] cmd = { "/bin/bash", "-c", upgradCmd + installerPath };

                    Process pb = Runtime.getRuntime().exec(cmd);
                    BufferedWriter writer = null;
                    writer = new BufferedWriter(new OutputStreamWriter(pb.getOutputStream()));
                    writer.write(pas);
                    writer.write('\n');
                    writer.flush();

                    String status = null;

                    if (pb.waitFor() == 0) {
                        status = "successfully";
                    } else {
                        status = "unsuccessfully";
                    }

                    JOptionPane.showMessageDialog(null,
                            "Installation of new version of NBIA Data Retriever is completed " + status + ".");
                } catch (IOException | InterruptedException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
        }
    }
}
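
In both Linux branches above, the BufferedWriter wraps the child process's stdin: the sudo password is written followed by a newline, and the flush() is what actually delivers it to sudo. Without the flush, the password would sit in the buffer and pb.waitFor() would block on a child still waiting for input.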

From source file:com.taobao.diamond.client.processor.ServerAddressProcessor.java

void storeServerAddressesToLocal() {
    List<String> domainNameList = new ArrayList<String>(diamondConfigure.getDomainNameList());
    PrintWriter printWriter = null;
    BufferedWriter bufferedWriter = null;
    try {
        File serverAddressFile = new File(
                generateLocalFilePath(this.diamondConfigure.getFilePath(), "ServerAddress"));
        if (!serverAddressFile.exists()) {
            serverAddressFile.createNewFile();
        }
        printWriter = new PrintWriter(serverAddressFile);
        bufferedWriter = new BufferedWriter(printWriter);
        for (String serveraddress : domainNameList) {
            bufferedWriter.write(serveraddress);
            bufferedWriter.newLine();
        }
        bufferedWriter.flush();
    } catch (Exception e) {
        log.error("", e);
    } finally {
        if (bufferedWriter != null) {
            try {
                bufferedWriter.close();
            } catch (IOException e) {
                // ignore
            }
        }
        if (printWriter != null) {
            printWriter.close();
        }
    }
}
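
The flush() before close() is technically redundant here, because BufferedWriter.close() flushes the buffer first. Making it explicit still has a point: the flush, which can throw, stays on the success path inside the try block, while the close in the finally block is best-effort cleanup whose IOException is deliberately ignored.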

From source file:massbank.admin.VersionManager.java

/**
 * Overwrites the CGI header ("#!") line of the given script file.
 * @param absPath path of the file to rewrite
 */
private void overwriteHeader(String absPath) {
    try {
        // read the file
        InputStreamReader reader = new InputStreamReader(new FileInputStream(absPath), "UTF-8");
        BufferedReader br = new BufferedReader(reader);
        StringBuffer text = new StringBuffer("");
        String line = "";
        while ((line = br.readLine()) != null) {
            int pos = line.indexOf("#!");
            if (pos >= 0) {
                // replace the header line with cgiHeader
                text.append(cgiHeader + "\n");
            } else {
                text.append(line + "\n");
            }
        }
        br.close();

        // write the modified text back to the file
        OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(absPath), "UTF-8");
        BufferedWriter bw = new BufferedWriter(writer);
        bw.write(text.toString());
        bw.flush();
        bw.close();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.taobao.android.builder.tasks.transform.AtlasProguardTransform.java

@Override
public void transform(TransformInvocation invocation) throws TransformException {

    firstTime = true;
    ConfigurableFileCollection oldConfigurableFileCollection = (ConfigurableFileCollection) ReflectUtils
            .getField(ProguardConfigurable.class, oldTransform, "configurationFiles");

    // With fastProguard enabled, only the build type's own proguard files are used
    // and library consumer proguard files are not merged into the app's proguard configuration.
    if (appVariantContext.getAtlasExtension().getTBuildConfig().isFastProguard()) {
        defaultProguardFiles.addAll(
                appVariantContext.getVariantData().getVariantConfiguration().getBuildType().getProguardFiles());
    } else {

        //            defaultProguardFiles.addAll(oldConfigurableFileCollection.getFiles());
        defaultProguardFiles.addAll(
                appVariantContext.getVariantData().getVariantConfiguration().getBuildType().getProguardFiles());
        nonConsumerProguardFiles.addAll(
                appVariantContext.getVariantData().getVariantConfiguration().getBuildType().getProguardFiles());

    }
    List<AwbBundle> awbBundles = AtlasBuildContext.androidDependencyTrees
            .get(appVariantContext.getScope().getVariantConfiguration().getFullName()).getAwbBundles();
    if (awbBundles != null && awbBundles.size() > 0) {
        File bundleRKeepFile = new File(
                appVariantContext.getBaseVariantData().getScope().getGlobalScope().getIntermediatesDir(),
                "awb-progrard/bundleRKeep.cfg");
        if (!bundleRKeepFile.getParentFile().exists()) {
            bundleRKeepFile.getParentFile().mkdirs();
        }

        StringBuilder keepRStr = new StringBuilder();
        for (AwbBundle bundleItem : awbBundles) {
            keepRStr.append(String.format("-keep class %s.R{*;}\n", bundleItem.bundleInfo.getPkgName()));
            keepRStr.append(String.format("-keep class %s.R$*{*;}\n", bundleItem.bundleInfo.getPkgName()));
        }
        try {
            BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(bundleRKeepFile));
            bufferedWriter.write(keepRStr.toString());
            bufferedWriter.flush();
            IOUtils.closeQuietly(bufferedWriter);
            FileLogger.getInstance("proguard").log("R keep infos: " + keepRStr);
        } catch (IOException e) {
            throw new RuntimeException("generate bundleRkeepFile failed", e);
        }
        appVariantContext.getBaseVariantData().getVariantConfiguration().getBuildType().getProguardFiles()
                .add(bundleRKeepFile);
        defaultProguardFiles.add(bundleRKeepFile);
    }

    if (appVariantContext.getAtlasExtension().getTBuildConfig().isFastProguard()) {
        fastTransform(invocation);
        return;
    }

    try {

        oldConfigurableFileCollection = appVariantContext.getProject().files().from(nonConsumerProguardFiles);

        ReflectUtils.updateField(this, "configurationFiles", oldConfigurableFileCollection);

        //            Configuration configuration = (Configuration) ReflectUtils.getField(BaseProguardAction.class,
        //                    oldTransform, "configuration");
        //            if (null == this.configuration.keep) {
        //                this.configuration.keep = new ArrayList();
        //            }
        //            if (null != configuration.keep) {
        //                this.configuration.keep.addAll(configuration.keep);
        //            }
        //
    } catch (Exception e) {
        throw new GradleException(e.getMessage(), e);
    }

    //apply bundle Inout
    AtlasProguardHelper.applyBundleInOutConfigration(appVariantContext, this);

    //apply bundle's configuration, Switch control
    if (buildConfig.isBundleProguardConfigEnabled()) {
        AtlasProguardHelper.applyBundleProguardConfigration(appVariantContext, this);
    }

    //apply mapping
    AtlasProguardHelper.applyMapping(appVariantContext, this);

    //set output
    File proguardOutFile = new File(appVariantContext.getProject().getBuildDir(), "outputs/proguard.cfg");
    this.printconfiguration(proguardOutFile);

    doTransform(invocation);
}
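
Note how the bundleRKeep.cfg writer above pairs flush() with IOUtils.closeQuietly(): closeQuietly swallows any IOException raised while closing, so the explicit flush() ensures that a failure to write the keep rules still surfaces inside the try block instead of being silently discarded.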

From source file:cn.leancloud.diamond.client.processor.ServerAddressProcessor.java

void storeServerAddressesToLocal() {
    List<String> domainNameList = new ArrayList<String>(diamondConfigure.getDomainNameList());
    PrintWriter printWriter = null;
    BufferedWriter bufferedWriter = null;
    try {
        File serverAddressFile = new File(
                generateLocalFilePath(this.diamondConfigure.getFilePath(), "ServerAddress"));
        if (!serverAddressFile.exists()) {
            serverAddressFile.createNewFile();
        }
        printWriter = new PrintWriter(serverAddressFile);
        bufferedWriter = new BufferedWriter(printWriter);
        for (String serveraddress : domainNameList) {
            bufferedWriter.write(serveraddress);
            bufferedWriter.newLine();
        }
        bufferedWriter.flush();
    } catch (Exception e) {
        log.error("??", e);
    } finally {
        if (bufferedWriter != null) {
            try {
                bufferedWriter.close();
            } catch (IOException e) {
                // ignore
            }
        }
        if (printWriter != null) {
            printWriter.close();
        }
    }
}

From source file:com.ebay.oss.bark.service.DqScheduleServiceImpl.java

public void generateAllWaitingJobsRunningConfigs() {
    try {
        logger.info("===========generating running config===============");
        Properties env = new Properties();
        env.load(Thread.currentThread().getContextClassLoader().getResourceAsStream("application.properties"));
        String environment = env.getProperty("env");

        for (DqJob eachJob : jobRepo.getByStatus(JobStatus.READY)) {
            String jobid = eachJob.getId();
            int jobtype = eachJob.getJobType();
            StringBuffer doneFiles = new StringBuffer();
            StringBuffer runningParameter = new StringBuffer();

            if (jobtype == ModelType.ACCURACY) {
                String modelid = eachJob.getModelList();
                long ts = eachJob.getStarttime();
                Date dt = new Date(ts);
                SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
                String dateString = formatter.format(dt);
                SimpleDateFormat formatter2 = new SimpleDateFormat("HH");
                String hourString = formatter2.format(dt);

                DqModel model = dqModelRepo.findByColumn("modelId", modelid);
                if (model == null) {
                    logger.warn("===================can not find model " + modelid);
                    continue;
                }

                String content = model.getModelContent();

                String[] contents = content.split("\\|");
                String srcPlatform = contents[0];
                String srcSystem = contents[1];
                String tgtPlatform = contents[2];
                String tgtSystem = contents[3];

                String[] attributesArray = contents[4].split(";");
                String[] attributes = attributesArray[0].split(",");
                String srcDataset = attributes[0].substring(0, attributes[0].lastIndexOf("."));
                String tgtDataset = attributes[1].substring(0, attributes[1].lastIndexOf("."));

                //               runningParameter.append(System.getProperty("line.separator")+srcPlatform+" "+srcSystem+" "+srcDataset);
                //               runningParameter.append(System.getProperty("line.separator")+tgtPlatform+" "+tgtSystem+" "+tgtDataset);

                List<Pair> queryList = new ArrayList<Pair>();
                queryList.add(new Pair("platform", srcPlatform));
                queryList.add(new Pair("system", srcSystem));
                queryList.add(new Pair("assetName", srcDataset));
                logger.info("===================find source object " + srcPlatform + " " + srcSystem + " "
                        + srcDataset);
                DBObject srcObj = dataAssetRepo.getByCondition(queryList);
                DataAsset srcAsset = new DataAsset(srcObj);

                List<Pair> queryList2 = new ArrayList<Pair>();
                queryList2.add(new Pair("platform", tgtPlatform));
                queryList2.add(new Pair("system", tgtSystem));
                queryList2.add(new Pair("assetName", tgtDataset));
                logger.info("===================find target object " + tgtPlatform + " " + tgtSystem + " "
                        + tgtDataset);
                DBObject tgtObj = dataAssetRepo.getByCondition(queryList2);
                DataAsset tgtAsset = new DataAsset(tgtObj);

                doneFiles.append(updateHDFSDirTemplateString(srcAsset.getAssetHDFSPath(), dateString,
                        hourString) + System.getProperty("line.separator")
                        + updateHDFSDirTemplateString(tgtAsset.getAssetHDFSPath(), dateString, hourString)
                        + System.getProperty("line.separator"));
                if (model.getSchedule() == ScheduleType.HOURLY && model.getSystem() == SystemType.BULLSEYE) {
                    Date dt4be = new Date(ts + 3600000);
                    //                  SimpleDateFormat formatter4be = new SimpleDateFormat("yyyyMMdd");
                    String dateString4be = formatter.format(dt4be);
                    //                  SimpleDateFormat formatter24be = new SimpleDateFormat("HH");
                    String hourString4be = formatter2.format(dt4be);
                    doneFiles.append(updateHDFSDirTemplateString(tgtAsset.getAssetHDFSPath(), dateString4be,
                            hourString4be) + System.getProperty("line.separator"));
                }

                AccuracyHiveJobConfig config = new AccuracyHiveJobConfig();
                List<AccuracyHiveJobConfigDetail> configDetailList = new ArrayList<AccuracyHiveJobConfigDetail>();
                for (String tempAttribute : attributesArray) {
                    String[] tempAttributeArray = tempAttribute.split(",");
                    String srcColName = tempAttributeArray[0]
                            .substring(tempAttributeArray[0].lastIndexOf(".") + 1);
                    String tgtColName = tempAttributeArray[1]
                            .substring(tempAttributeArray[1].lastIndexOf(".") + 1);
                    configDetailList.add(new AccuracyHiveJobConfigDetail(srcAsset.getColId(srcColName),
                            srcColName, tgtAsset.getColId(tgtColName), tgtColName, tempAttributeArray[3],
                            Boolean.parseBoolean(tempAttributeArray[2].toUpperCase())));
                }
                config.setAccuracyMapping(configDetailList);
                config.setSource(srcAsset.getAssetName());
                config.setTarget(tgtAsset.getAssetName());

                config.setSrcPartitions(getPartitionList(srcAsset, ts));

                List<List<PartitionConfig>> tgtPartitions = new ArrayList<List<PartitionConfig>>();
                tgtPartitions.add(getPartitionList(tgtAsset, ts));
                if (model.getSchedule() == ScheduleType.HOURLY && model.getSystem() == SystemType.BULLSEYE) {
                    tgtPartitions.add(getPartitionList(tgtAsset, ts + 3600000));
                }

                config.setTgtPartitions(tgtPartitions);

                Gson gson = new Gson();
                runningParameter.append(gson.toJson(config) + System.getProperty("line.separator"));

            } else if (jobtype == ModelType.VALIDITY) {

                String modelList = eachJob.getModelList();
                long ts = eachJob.getStarttime();
                Date dt = new Date(ts);
                SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
                String dateString = formatter.format(dt);
                SimpleDateFormat formatter2 = new SimpleDateFormat("HH");
                String hourString = formatter2.format(dt);

                List<String> models = new ArrayList<String>();
                if (!modelList.contains(ScheduleModelSeperator.SEPERATOR)) {
                    models.add(modelList);
                } else {
                    models = Arrays.asList(modelList.split(ScheduleModelSeperator.SPLIT_SEPERATOR));
                }

                if (models.size() == 0)
                    return;
                logger.debug("+++ model id value: " + models.get(0));
                DqModel model = dqModelRepo.findByColumn("modelId", models.get(0));
                logger.debug("--- model: " + model);
                if (model == null) {
                    continue;
                }
                DataAsset srcAsset = dataAssetRepo.getById((long) model.getAssetId());

                doneFiles
                        .append(updateHDFSDirTemplateString(srcAsset.getAssetHDFSPath(), dateString, hourString)
                                + System.getProperty("line.separator"));

                ValidateHiveJobConfig config = new ValidateHiveJobConfig(srcAsset.getAssetName());
                config.setTimePartitions(getPartitionList(srcAsset, ts));

                for (String modelname : models) {
                    model = dqModelRepo.findByColumn("modelId", modelname);
                    if (model == null) {
                        logger.warn("===================can not find model " + modelname);
                        continue;
                    }

                    String content = model.getModelContent();
                    String[] contents = content.split("\\|");
                    String calType = contents[2];
                    String calColname = contents[3];

                    config.addColumnCalculation(srcAsset.getColId(calColname), calColname,
                            Integer.parseInt(calType));
                }

                Gson gson = new Gson();
                runningParameter.append(gson.toJson(config) + System.getProperty("line.separator"));
            }

            logger.info("====================" + env.getProperty("job.local.folder") + File.separator + jobid
                    + File.separator + "cmd.txt");

            String dir = env.getProperty("job.local.folder") + File.separator + jobid + File.separator
                    + "cmd.txt";
            createFile(dir);
            File file = new File(dir);
            FileWriter fw = new FileWriter(file.getAbsoluteFile());
            BufferedWriter bw = new BufferedWriter(fw);
            bw.write(runningParameter.toString());
            bw.flush();
            bw.close();

            String dir2 = env.getProperty("job.local.folder") + File.separator + jobid + File.separator
                    + "watchfile.txt";
            createFile(dir2);
            File file2 = new File(dir2);
            FileWriter fw2 = new FileWriter(file2.getAbsoluteFile());
            BufferedWriter bw2 = new BufferedWriter(fw2);
            bw2.write(doneFiles.toString());
            bw2.flush();
            bw2.close();

            logger.info("====================create file done");

            if (environment.equals("prod")) {
                String hdfs = env.getProperty("job.hdfs.folder") + "/"
                        + env.getProperty("job.hdfs.runningfoldername");
                Process process1 = Runtime.getRuntime()
                        .exec("hadoop fs -mkdir " + hdfs + File.separator + jobid);
                logger.info("====================hadoop fs -mkdir " + hdfs + File.separator + jobid);
                process1.waitFor();
                Process process2 = Runtime.getRuntime()
                        .exec("hadoop fs -put " + dir + " " + hdfs + File.separator + jobid + File.separator);
                logger.info("====================hadoop fs -put " + dir + " " + hdfs + File.separator + jobid
                        + File.separator);
                process2.waitFor();
                Process process2_1 = Runtime.getRuntime().exec("hadoop fs -put " + dir2 + " " + hdfs
                        + File.separator + jobid + File.separator + "_watchfile");
                logger.info("====================hadoop fs -put " + dir2 + " " + hdfs + File.separator + jobid
                        + File.separator + "_watchfile");
                process2_1.waitFor();
                Process process3 = Runtime.getRuntime().exec("hadoop fs -touchz " + hdfs + File.separator
                        + jobid + File.separator + "_type_" + jobtype + ".done");
                logger.info("====================hadoop fs -touchz " + hdfs + File.separator + jobid
                        + File.separator + "_type_" + jobtype + ".done");
                process3.waitFor();

            }

            //file.delete();
            new File(env.getProperty("job.local.folder") + File.separator + jobid).delete();
            logger.info("====================delete file done");

            eachJob.setStatus(JobStatus.WAITING);
            jobRepo.update(eachJob);
            logger.info("====================udpate status done");
        }

    } catch (Exception e) {
        logger.error(e.toString(), e);
    }

}
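
Both cmd.txt and watchfile.txt above use the same write/flush/close sequence on a BufferedWriter over a FileWriter. The flush() is redundant immediately before close(), and a try-with-resources block would be safer: if write() throws, the writer is never closed and the file handle leaks.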

From source file:com.ebay.oss.griffin.service.DqScheduleServiceImpl.java

void generateAllWaitingJobsRunningConfigs() {
    try {
        logger.info("===========generating running config===============");
        Properties env = new Properties();
        env.load(Thread.currentThread().getContextClassLoader().getResourceAsStream("application.properties"));
        String environment = env.getProperty("env");

        for (DqJob eachJob : jobRepo.getByStatus(JobStatus.READY)) {
            String jobid = eachJob.getId();
            int jobtype = eachJob.getJobType();
            StringBuffer doneFiles = new StringBuffer();
            StringBuffer runningParameter = new StringBuffer();

            if (jobtype == ModelType.ACCURACY) {
                String modelid = eachJob.getModelList();
                long ts = eachJob.getStarttime();
                Date dt = new Date(ts);
                SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
                String dateString = formatter.format(dt);
                SimpleDateFormat formatter2 = new SimpleDateFormat("HH");
                String hourString = formatter2.format(dt);

                DqModel model = dqModelRepo.findByColumn("modelId", modelid);
                if (model == null) {
                    logger.warn("===================can not find model " + modelid);
                    continue;
                }

                String content = model.getModelContent();

                String[] contents = content.split("\\|");
                String srcPlatform = contents[0];
                String srcSystem = contents[1];
                String tgtPlatform = contents[2];
                String tgtSystem = contents[3];

                String[] attributesArray = contents[4].split(";");
                String[] attributes = attributesArray[0].split(",");
                String srcDataset = attributes[0].substring(0, attributes[0].lastIndexOf("."));
                String tgtDataset = attributes[1].substring(0, attributes[1].lastIndexOf("."));

                //               runningParameter.append(System.getProperty("line.separator")+srcPlatform+" "+srcSystem+" "+srcDataset);
                //               runningParameter.append(System.getProperty("line.separator")+tgtPlatform+" "+tgtSystem+" "+tgtDataset);

                List<Pair> queryList = new ArrayList<Pair>();
                queryList.add(new Pair("platform", srcPlatform));
                queryList.add(new Pair("system", srcSystem));
                queryList.add(new Pair("assetName", srcDataset));
                logger.info("===================find source object " + srcPlatform + " " + srcSystem + " "
                        + srcDataset);
                DBObject srcObj = dataAssetRepo.getByCondition(queryList);
                DataAsset srcAsset = new DataAsset(srcObj);

                List<Pair> queryList2 = new ArrayList<Pair>();
                queryList2.add(new Pair("platform", tgtPlatform));
                queryList2.add(new Pair("system", tgtSystem));
                queryList2.add(new Pair("assetName", tgtDataset));
                logger.info("===================find target object " + tgtPlatform + " " + tgtSystem + " "
                        + tgtDataset);
                DBObject tgtObj = dataAssetRepo.getByCondition(queryList2);
                DataAsset tgtAsset = new DataAsset(tgtObj);

                doneFiles.append(updateHDFSDirTemplateString(srcAsset.getAssetHDFSPath(), dateString,
                        hourString) + System.getProperty("line.separator")
                        + updateHDFSDirTemplateString(tgtAsset.getAssetHDFSPath(), dateString, hourString)
                        + System.getProperty("line.separator"));
                if (model.getSchedule() == ScheduleType.HOURLY && model.getSystem() == SystemType.BULLSEYE) {
                    Date dt4be = new Date(ts + 3600000);
                    //                  SimpleDateFormat formatter4be = new SimpleDateFormat("yyyyMMdd");
                    String dateString4be = formatter.format(dt4be);
                    //                  SimpleDateFormat formatter24be = new SimpleDateFormat("HH");
                    String hourString4be = formatter2.format(dt4be);
                    doneFiles.append(updateHDFSDirTemplateString(tgtAsset.getAssetHDFSPath(), dateString4be,
                            hourString4be) + System.getProperty("line.separator"));
                }

                AccuracyHiveJobConfig config = new AccuracyHiveJobConfig();
                List<AccuracyHiveJobConfigDetail> configDetailList = new ArrayList<AccuracyHiveJobConfigDetail>();
                for (String tempAttribute : attributesArray) {
                    String[] tempAttributeArray = tempAttribute.split(",");
                    String srcColName = tempAttributeArray[0]
                            .substring(tempAttributeArray[0].lastIndexOf(".") + 1);
                    String tgtColName = tempAttributeArray[1]
                            .substring(tempAttributeArray[1].lastIndexOf(".") + 1);
                    configDetailList.add(new AccuracyHiveJobConfigDetail(srcAsset.getColId(srcColName),
                            srcColName, tgtAsset.getColId(tgtColName), tgtColName, tempAttributeArray[3],
                            Boolean.parseBoolean(tempAttributeArray[2].toUpperCase())));
                }
                config.setAccuracyMapping(configDetailList);
                config.setSource(srcAsset.getAssetName());
                config.setTarget(tgtAsset.getAssetName());

                config.setSrcPartitions(getPartitionList(srcAsset, ts));

                List<List<PartitionConfig>> tgtPartitions = new ArrayList<List<PartitionConfig>>();
                tgtPartitions.add(getPartitionList(tgtAsset, ts));
                if (model.getSchedule() == ScheduleType.HOURLY && model.getSystem() == SystemType.BULLSEYE) {
                    tgtPartitions.add(getPartitionList(tgtAsset, ts + 3600000));
                }

                config.setTgtPartitions(tgtPartitions);

                Gson gson = new Gson();
                runningParameter.append(gson.toJson(config) + System.getProperty("line.separator"));

            } else if (jobtype == ModelType.VALIDITY) {

                String modelList = eachJob.getModelList();
                long ts = eachJob.getStarttime();
                Date dt = new Date(ts);
                SimpleDateFormat formatter = new SimpleDateFormat("yyyyMMdd");
                String dateString = formatter.format(dt);
                SimpleDateFormat formatter2 = new SimpleDateFormat("HH");
                String hourString = formatter2.format(dt);

                List<String> models = new ArrayList<String>();
                if (!modelList.contains(ScheduleModelSeperator.SEPERATOR)) {
                    models.add(modelList);
                } else {
                    models = Arrays.asList(modelList.split(ScheduleModelSeperator.SPLIT_SEPERATOR));
                }

                if (models.size() == 0)
                    return;
                logger.debug("+++ model id value: " + models.get(0));
                DqModel model = dqModelRepo.findByColumn("modelId", models.get(0));
                logger.debug("--- model: " + model);
                if (model == null) {
                    continue;
                }
                DataAsset srcAsset = dataAssetRepo.getById((long) model.getAssetId());

                doneFiles
                        .append(updateHDFSDirTemplateString(srcAsset.getAssetHDFSPath(), dateString, hourString)
                                + System.getProperty("line.separator"));

                ValidateHiveJobConfig config = new ValidateHiveJobConfig(srcAsset.getAssetName());
                config.setTimePartitions(getPartitionList(srcAsset, ts));

                for (String modelname : models) {
                    model = dqModelRepo.findByColumn("modelId", modelname);
                    if (model == null) {
                        logger.warn("===================can not find model " + modelname);
                        continue;
                    }

                    String content = model.getModelContent();
                    String[] contents = content.split("\\|");
                    String calType = contents[2];
                    String calColname = contents[3];

                    config.addColumnCalculation(srcAsset.getColId(calColname), calColname,
                            Integer.parseInt(calType));
                }

                Gson gson = new Gson();
                runningParameter.append(gson.toJson(config) + System.getProperty("line.separator"));
            }

            logger.info("====================" + env.getProperty("job.local.folder") + File.separator + jobid
                    + File.separator + "cmd.txt");

            String dir = env.getProperty("job.local.folder") + File.separator + jobid + File.separator
                    + "cmd.txt";
            createFile(dir);
            File file = new File(dir);
            FileWriter fw = new FileWriter(file.getAbsoluteFile());
            BufferedWriter bw = new BufferedWriter(fw);
            bw.write(runningParameter.toString());
            bw.flush();
            bw.close();

            String dir2 = env.getProperty("job.local.folder") + File.separator + jobid + File.separator
                    + "watchfile.txt";
            createFile(dir2);
            File file2 = new File(dir2);
            FileWriter fw2 = new FileWriter(file2.getAbsoluteFile());
            BufferedWriter bw2 = new BufferedWriter(fw2);
            bw2.write(doneFiles.toString());
            bw2.flush();
            bw2.close();

            logger.info("====================create file done");

            if (environment.equals("prod")) {
                String hdfs = env.getProperty("job.hdfs.folder") + "/"
                        + env.getProperty("job.hdfs.runningfoldername");
                Process process1 = Runtime.getRuntime()
                        .exec("hadoop fs -mkdir " + hdfs + File.separator + jobid);
                logger.info("====================hadoop fs -mkdir " + hdfs + File.separator + jobid);
                process1.waitFor();
                Process process2 = Runtime.getRuntime()
                        .exec("hadoop fs -put " + dir + " " + hdfs + File.separator + jobid + File.separator);
                logger.info("====================hadoop fs -put " + dir + " " + hdfs + File.separator + jobid
                        + File.separator);
                process2.waitFor();
                Process process2_1 = Runtime.getRuntime().exec("hadoop fs -put " + dir2 + " " + hdfs
                        + File.separator + jobid + File.separator + "_watchfile");
                logger.info("====================hadoop fs -put " + dir2 + " " + hdfs + File.separator + jobid
                        + File.separator + "_watchfile");
                process2_1.waitFor();
                Process process3 = Runtime.getRuntime().exec("hadoop fs -touchz " + hdfs + File.separator
                        + jobid + File.separator + "_type_" + jobtype + ".done");
                logger.info("====================hadoop fs -touchz " + hdfs + File.separator + jobid
                        + File.separator + "_type_" + jobtype + ".done");
                process3.waitFor();

            }

            //file.delete();
            new File(env.getProperty("job.local.folder") + File.separator + jobid).delete();
            logger.info("====================delete file done");

            eachJob.setStatus(JobStatus.WAITING);
            jobRepo.update(eachJob);
            logger.info("====================udpate status done");
        }

    } catch (Exception e) {
        logger.error(e.toString(), e);
    }

}