Example usage for org.apache.commons.lang ArrayUtils addAll

Introduction

This page collects example usages of org.apache.commons.lang ArrayUtils.addAll.

Prototype

public static double[] addAll(double[] array1, double[] array2)

addAll is overloaded for each primitive array type as well as for Object[]; the examples below exercise several of these overloads.

Document

Adds all the elements of the given arrays into a new array. A null input array is treated as empty: the result is a clone of the non-null argument, or null if both arguments are null.
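
A minimal, self-contained sketch of that documented behavior:

import org.apache.commons.lang.ArrayUtils;

public class AddAllDemo {
    public static void main(String[] args) {
        // Concatenation: the elements of both arrays, in order, in a new array.
        double[] merged = ArrayUtils.addAll(new double[] { 1.0, 2.0 }, new double[] { 3.0 });
        System.out.println(merged.length); // 3

        // Null handling: a null argument yields a clone of the other array.
        double[] fromNull = ArrayUtils.addAll(null, new double[] { 4.0 });
        System.out.println(fromNull.length); // 1
    }
}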

Usage

From source file:edu.harvard.iq.dvn.ingest.dsb.impl.DvnRJobRequest.java

public List<String> getUpdatedVariableTypesAsString() {
    int[] vt;

    if (hasRecodedVariables()) {
        vt = ArrayUtils.addAll(getVariableTypes(), getRecodedVarTypeSet());
    } else {
        vt = getVariableTypes();
    }

    List<String> vts = new ArrayList<String>();

    for (int i = 0; i < vt.length; i++) {
        vts.add(Integer.toString(vt[i]));
    }
    return vts;
}
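
The example above uses the int[] overload, which returns an int[] directly, so no cast is needed. The conversion loop could equally be written as an enhanced for:

for (int t : vt) {
    vts.add(Integer.toString(t));
}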

From source file:de.tudarmstadt.ukp.dkpro.core.corenlp.CoreNlpDependencyParserTest.java

private JCas runTest(String aLanguage, String aVariant, String aText, Object... aExtraParams) throws Exception {
    AssumeResource.assumeResource(CoreNlpDependencyParser.class, "depparser", aLanguage, aVariant);

    AggregateBuilder aggregate = new AggregateBuilder();

    aggregate.add(createEngineDescription(CoreNlpPosTagger.class));
    Object[] params = new Object[] { CoreNlpDependencyParser.PARAM_VARIANT, aVariant,
            CoreNlpDependencyParser.PARAM_PRINT_TAGSET, true };
    params = ArrayUtils.addAll(params, aExtraParams);
    aggregate.add(createEngineDescription(CoreNlpDependencyParser.class, params));

    return TestRunner.runTest(aggregate.createAggregateDescription(), aLanguage, aText);
}
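
The pattern here, a fixed prefix of engine parameters extended with caller-supplied varargs, distils to a short sketch (the helper name is illustrative):

static Object[] withExtras(Object[] defaults, Object... extras) {
    // lang 2.x addAll(Object[], Object[]) copies both arrays into a new one;
    // the result is used as Object[], so no cast is needed here.
    return ArrayUtils.addAll(defaults, extras);
}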

From source file:edu.harvard.iq.dvn.ingest.dsb.impl.DvnRJobRequest.java

public List<String> getUpdatedVariableTypesWithBooleanAsString() {
    int[] vt;

    if (hasRecodedVariables()) {
        vt = ArrayUtils.addAll(getVariableTypesWithBoolean(), getRecodedVarTypeSet());
    } else {
        vt = getVariableTypesWithBoolean();
    }

    List<String> vts = new ArrayList<String>();

    for (int i = 0; i < vt.length; i++) {
        vts.add(Integer.toString(vt[i]));
    }
    return vts;
}

From source file:canreg.client.gui.analysis.FrequenciesByYearInternalFrame.java

public void createPivot(String fileName) {
    String[] columnNames = new String[resultTable.getColumnCount()];

    // We need 3 columns to work with        
    if (columnNames.length != 3) {
        return;
    }
    // TODO Extend this to at least 4
    String[] nextLine = new String[resultTable.getColumnCount()];

    // Find the column names
    for (int j = 0; j < columnNames.length; j++) {
        columnNames[j] = resultTable.getColumnName(j);
    }

    // variable we lock
    String lockVariable = columnNames[1];

    HashMap<String, HashMap<String, String>> data = new HashMap<String, HashMap<String, String>>();
    HashMap<String, String> years;
    Set<String> allYears = new TreeSet<String>();

    // load up the data
    for (int i = 0; i < resultTable.getRowCount(); i++) {
        String year, cases, code;

        for (int j = 0; j < nextLine.length; j++) {
            nextLine[j] = resultTable.getValueAt(i, j).toString();
        }
        year = nextLine[0];
        if (year.trim().length() == 0) {
            year = "MISSING";
        }

        allYears.add(year);

        code = nextLine[1];

        if (code.trim().length() == 0) {
            code = "MISSING";
        }

        cases = nextLine[2];

        years = data.get(code);
        if (years == null) {
            years = new HashMap<String, String>();
            data.put(code, years);
        }
        years.put(year, cases);
    }

    // find the variables list element for the locked variable
    DatabaseVariablesListElement lockedDatabaseVariablesListElement = null;
    for (DatabaseVariablesListElement vle : chosenVariables) {
        if (vle.getShortName().compareToIgnoreCase(lockVariable) == 0) {
            lockedDatabaseVariablesListElement = vle;
        }
    }

    if (lockedDatabaseVariablesListElement == null) {
        return;
    } // this happens if the user has updated the selection of variables

    int dictionaryID = lockedDatabaseVariablesListElement.getDictionaryID();
    Dictionary dict = null;
    if (dictionaryID >= 0) {
        dict = dictionary.get(dictionaryID);
    }

    String[] allYearsArray = allYears.toArray(new String[0]);
    Writer writer = null;
    try {
        writer = new FileWriter(fileName);

        // Write the column names
        String[] codeArray = { lockedDatabaseVariablesListElement.getFullName(), lockVariable };

        String[] headers = (String[]) ArrayUtils.addAll(codeArray, allYearsArray);

        String[] codes = data.keySet().toArray(new String[0]);
        Arrays.sort(codes);

        CSVFormat format = CSVFormat.DEFAULT.withDelimiter(',').withHeader(headers);

        CSVPrinter csvPrinter = new CSVPrinter(writer, format);

        // write the rows    
        for (String code : codes) {
            LinkedList<String> row = new LinkedList<String>();
            if (dict != null) {
                DictionaryEntry dictionaryEntry = dict.getDictionaryEntry(code);
                if (dictionaryEntry != null) {
                    row.add(dictionaryEntry.getDescription());
                } else {
                    row.add("");
                }
            } else {
                row.add("");
            }
            row.add(code);

            years = data.get(code);
            for (String year : allYears) {
                String cell = years.get(year);
                if (cell == null) {
                    row.add("0");
                } else {
                    row.add(cell);
                }
            }
            csvPrinter.printRecord(row);
        }
        csvPrinter.flush();

    } catch (IOException ex) {
        // JOptionPane.showMessageDialog(this, "File NOT written.\n" + ex.getLocalizedMessage(), "ERROR", JOptionPane.ERROR_MESSAGE);
        Logger.getLogger(FrequenciesByYearInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            if (writer != null) {
                writer.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(FrequenciesByYearInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
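
The (String[]) cast on the addAll result above is safe because the lang 2.x Object[] overload allocates the joined array with the runtime component type of its first argument, and codeArray is a String[]. Had the first argument been a plain Object[], the cast would fail at runtime. In miniature (values are illustrative):

String[] codeArray = { "Description", "Code" };
String[] yearColumns = { "2001", "2002" };
// The result's runtime type follows codeArray (String[]), so the cast succeeds.
String[] headers = (String[]) ArrayUtils.addAll(codeArray, yearColumns);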

From source file:com.oneops.inductor.WorkOrderExecutor.java

private Set<String> syncServiceCookbooks(CmsWorkOrderSimple wo, String woBomCircuit, String user,
        String[] rsyncCmdLineWithKey, String host, String port, String logKey, String keyFile) {
    logger.info("checking for any service cookbook to be rsynched ..");
    //rsync cloud services cookbooks
    String cloudName = wo.getCloud().getCiName();
    Set<String> serviceCookbookPaths = new HashSet<>();
    Map<String, Map<String, CmsCISimple>> services = wo.getServices();
    if (services != null) {
        for (String serviceName : services.keySet()) { // for each service
            CmsCISimple serviceCi = services.get(serviceName).get(cloudName);
            if (serviceCi != null) {
                String serviceClassName = serviceCi.getCiClassName();
                String serviceCookbookCircuit = getCookbookPath(serviceClassName);
                if (!serviceCookbookCircuit.equals(woBomCircuit)) {
                    //this service class is not in the same circuit as that of the bom ci getting deployed.
                    //Go ahead and include the cookbook of this service to rsync to remote
                    String serviceCookbookBaseDir = config.getCircuitDir().replace("packer",
                            serviceCookbookCircuit);
                    String serviceClassNameShort = serviceClassName
                            .substring(serviceClassName.lastIndexOf(".") + 1);
                    String serviceCookbookPath = serviceCookbookBaseDir + "/components/cookbooks/"
                            + serviceClassNameShort.toLowerCase() + "/";
                    logger.info("service-serviceCookbookPath: " + serviceCookbookPath);
                    if (new File(serviceCookbookPath).exists()) {
                        String serviceCookbookCircuitPath = "/home/" + user + "/" + serviceCookbookCircuit
                                + "/components/cookbooks";
                        serviceCookbookPaths.add(serviceCookbookCircuitPath);
                        String destination = serviceCookbookCircuitPath + "/"
                                + serviceClassNameShort.toLowerCase() + "/";

                        String remoteCmd = "mkdir -p " + destination;
                        String[] cmd = (String[]) ArrayUtils.addAll(sshCmdLine,
                                new String[] { keyFile, "-p " + port, user + "@" + host, remoteCmd });
                        logger.info(logKey + " ### EXEC: " + user + "@" + host + " " + remoteCmd);
                        ProcessResult result = processRunner.executeProcessRetry(
                                new ExecutionContext(wo, cmd, getLogKey(wo), getRetryCountForWorkOrder(wo)));
                        if (result.getResultCode() != 0) {
                            logger.error(logKey + " Error while creating service cookbook directory on remote");
                        }

                        String[] cmdLine = (String[]) ArrayUtils.addAll(rsyncCmdLineWithKey,
                                new String[] { serviceCookbookPath, user + "@" + host + ":" + destination });

                        logger.info(logKey + " ### SYNC Service cookbook: " + serviceCookbookPath);

                        if (!host.equals(TEST_HOST)) {
                            result = processRunner
                                    .executeProcessRetry(new ExecutionContext(wo, cmdLine, logKey, retryCount));
                            if (result.getResultCode() > 0) {
                                wo.setComments("FATAL: "
                                        + generateRsyncErrorMessage(result.getResultCode(), host + ":" + port));
                                return null;
                            }
                        }
                    } else {
                        logger.warn("Cookbook " + serviceCookbookPath + " does not exist on this inductor");
                    }
                }
            }
        }
    }
    return serviceCookbookPaths;
}

From source file:au.org.ala.biocache.web.WMSController.java

/**
 * Get bounding box for a query.
 *
 * @param requestParams
 * @return
 * @throws Exception
 */
double[] getBBox(SpatialSearchRequestParams requestParams) throws Exception {
    double[] bbox = new double[4];
    String[] sort = { "longitude", "latitude", "longitude", "latitude" };
    String[] dir = { "asc", "asc", "desc", "desc" };

    //remove instances of null longitude or latitude
    String[] fq = (String[]) ArrayUtils.addAll(getFq(requestParams),
            new String[] { "longitude:[* TO *]", "latitude:[* TO *]" });
    requestParams.setFq(fq);
    requestParams.setPageSize(10);

    for (int i = 0; i < sort.length; i++) {
        requestParams.setSort(sort[i]);
        requestParams.setDir(dir[i]);
        requestParams.setFl(sort[i]);

        SolrDocumentList sdl = searchDAO.findByFulltext(requestParams);
        if (sdl != null && sdl.size() > 0) {
            if (sdl.get(0) != null) {
                bbox[i] = (Double) sdl.get(0).getFieldValue(sort[i]);
            } else {
                logger.error("searchDAO.findByFulltext returning SolrDocumentList with null records");
            }
        }
    }
    return bbox;
}
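
Because the lang 2.x overload treats a null first argument by returning a clone of the second, the concatenation above is safe even if getFq(requestParams) returns null, and the two fixed filters are still applied. A reduced sketch of the idiom:

String[] userFq = null; // e.g. no user-supplied filter queries
String[] fq = (String[]) ArrayUtils.addAll(userFq,
        new String[] { "longitude:[* TO *]", "latitude:[* TO *]" });
// fq now holds just the two fixed filters; no NullPointerException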

From source file:gda.hrpd.pmac.EpicsCVScanController.java

private double[] get2Theta() {
    double[] x1 = ArrayUtils.EMPTY_DOUBLE_ARRAY;
    try {
        // logger.info("gets MAC1X");
        x1 = getMAC1X();
        // logger.info("gets MAC1X DONE");
    } catch (TimeoutException e) {
        logger.error("Timeout while gets MAC1X", e);
        e.printStackTrace();
    } catch (CAException e) {
        logger.error("CAException while gets MAC1X", e);
        e.printStackTrace();
    } catch (InterruptedException e) {
        logger.error("InterruptedException while gets MAC1X", e);
    }
    double[] x2 = ArrayUtils.EMPTY_DOUBLE_ARRAY;
    try {
        // logger.info("gets MAC2X");
        x2 = getMAC2X();
        // logger.info("gets MAC2X DONE");
    } catch (TimeoutException e) {
        logger.error("Timeout while gets MAC2X", e);
        e.printStackTrace();
    } catch (CAException e) {
        logger.error("CAException while gets MAC2X", e);
        e.printStackTrace();
    } catch (InterruptedException e) {
        logger.error("InterruptedException while gets MAC2X", e);
    }
    double[] x3 = ArrayUtils.EMPTY_DOUBLE_ARRAY;
    try {
        // logger.info("gets MAC3X");
        x3 = getMAC3X();
        // logger.info("gets MAC3X DONE");
    } catch (TimeoutException e) {
        logger.error("Timeout while gets MAC3X", e);
        e.printStackTrace();
    } catch (CAException e) {
        logger.error("CAException while gets MAC3X", e);
        e.printStackTrace();
    } catch (InterruptedException e) {
        logger.error("InterruptedException while gets MAC3X", e);
    }
    double[] x4 = ArrayUtils.EMPTY_DOUBLE_ARRAY;
    try {
        // logger.info("gets MAC4X");
        x4 = getMAC4X();
        // logger.info("gets MAC4X DONE");
    } catch (TimeoutException e) {
        logger.error("Timeout while gets MAC4X", e);
        e.printStackTrace();
    } catch (CAException e) {
        logger.error("CAException while gets MAC4X", e);
        e.printStackTrace();
    } catch (InterruptedException e) {
        logger.error("InterruptedException while gets MAC4X", e);
    }
    double[] x5 = ArrayUtils.EMPTY_DOUBLE_ARRAY;
    try {
        // logger.info("gets MAC5X");
        x5 = getMAC5X();
        // logger.info("gets MAC5X DONE");
    } catch (TimeoutException e) {
        logger.error("Timeout while gets MAC5X", e);
        e.printStackTrace();
    } catch (CAException e) {
        logger.error("CAException while gets MAC5X", e);
        e.printStackTrace();
    } catch (InterruptedException e) {
        logger.error("InterruptedException while gets MAC5X", e);
    }
    return ArrayUtils.subarray(
            ArrayUtils.addAll(ArrayUtils.addAll(ArrayUtils.addAll(ArrayUtils.addAll(x1, x2), x3), x4), x5),
            16500, 305000);
}
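
Each nested addAll call above allocates and copies a fresh intermediate array, so the elements of x1 are copied four times before the final subarray is taken. When that matters, sizing the result once is cheaper; a sketch of the alternative (assuming non-null inputs):

static double[] concat(double[]... parts) {
    int total = 0;
    for (double[] p : parts) {
        total += p.length;
    }
    double[] out = new double[total];
    int pos = 0;
    for (double[] p : parts) {
        System.arraycopy(p, 0, out, pos, p.length);
        pos += p.length;
    }
    return out;
}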

From source file:au.org.ala.biocache.dao.SearchDAOImpl.java

/**
 * Writes the index fields to the supplied output stream in CSV format.
 *
 * DM: refactored to split the query by month to improve performance.
 * Further enhancements possible:
 * 1) Multi threaded
 * 2) More filtering, by year or decade..
 *
 * @param downloadParams
 * @param out
 * @param includeSensitive
 * @throws Exception
 */
public Map<String, Integer> writeResultsFromIndexToStream(final DownloadRequestParams downloadParams,
        OutputStream out, boolean includeSensitive, final DownloadDetailsDTO dd, boolean checkLimit)
        throws Exception {
    long start = System.currentTimeMillis();
    final Map<String, Integer> uidStats = new HashMap<String, Integer>();
    if (server == null) {
        initServer();
    }
    try {
        SolrQuery solrQuery = new SolrQuery();
        formatSearchQuery(downloadParams);

        String dFields = downloadParams.getFields();

        if (includeSensitive) {
            //include raw latitude and longitudes
            dFields = dFields
                    .replaceFirst("decimalLatitude.p",
                            "sensitive_latitude,sensitive_longitude,decimalLatitude.p")
                    .replaceFirst(",locality,", ",locality,sensitive_locality,");
        }

        StringBuilder sb = new StringBuilder(dFields);
        if (!downloadParams.getExtra().isEmpty()) {
            sb.append(",").append(downloadParams.getExtra());
        }

        String[] requestedFields = sb.toString().split(",");
        List<String>[] indexedFields = downloadFields.getIndexFields(requestedFields);
        logger.debug("Fields included in download: " + indexedFields[0]);
        logger.debug("Fields excluded from download: " + indexedFields[1]);
        logger.debug("The headers in downloads: " + indexedFields[2]);

        //set the fields to the ones that are available in the index
        final String[] fields = indexedFields[0].toArray(new String[] {});
        solrQuery.setFields(fields);
        StringBuilder qasb = new StringBuilder();
        if (!"none".equals(downloadParams.getQa())) {
            solrQuery.addField("assertions");
            if (!"all".equals(downloadParams.getQa())) {
                //add all the qa fields
                qasb.append(downloadParams.getQa());
            }
        }
        solrQuery.addField("institution_uid").addField("collection_uid").addField("data_resource_uid")
                .addField("data_provider_uid");

        //add context information
        updateQueryContext(downloadParams);
        solrQuery.setQuery(buildSpatialQueryString(downloadParams));
        solrQuery.setFacetMinCount(1);
        solrQuery.setFacetLimit(-1);

        //get the assertion facets to add them to the download fields
        boolean getAssertionsFromFacets = "all".equals(downloadParams.getQa());
        SolrQuery monthAssertionsQuery = getAssertionsFromFacets
                ? solrQuery.getCopy().addFacetField("month", "assertions")
                : solrQuery.getCopy().addFacetField("month");
        if (getAssertionsFromFacets) {
            //set the order for the facet to be based on the index - this will force the assertions to be returned in the same order each time
            //based on alphabetical sort.  The number of QA's may change between searches so we can't guarantee that the order won't change
            monthAssertionsQuery.add("f.assertions.facet.sort", "index");
        }
        QueryResponse facetQuery = runSolrQuery(monthAssertionsQuery, downloadParams.getFq(), 0, 0, "score",
                "asc");

        //set the totalrecords for the download details
        dd.setTotalRecords(facetQuery.getResults().getNumFound());
        if (checkLimit && dd.getTotalRecords() < MAX_DOWNLOAD_SIZE) {
            checkLimit = false;
        }

        //get the month facets (used to split the download into queries) and the assertion facets (added to the download fields)
        List<Count> splitByFacet = null;

        for (FacetField facet : facetQuery.getFacetFields()) {
            if (facet.getName().equals("assertions") && facet.getValueCount() > 0) {
                for (FacetField.Count facetEntry : facet.getValues()) {
                    if (qasb.length() > 0)
                        qasb.append(",");
                    qasb.append(facetEntry.getName());
                }
            }
            if (facet.getName().equals("month") && facet.getValueCount() > 0) {
                splitByFacet = facet.getValues();
            }
        }

        String qas = qasb.toString();
        final String[] qaFields = qas.equals("") ? new String[] {} : qas.split(",");
        String[] qaTitles = downloadFields.getHeader(qaFields, false);

        String[] header = org.apache.commons.lang3.ArrayUtils.addAll(indexedFields[2].toArray(new String[] {}),
                qaTitles);

        //construct correct RecordWriter based on the supplied fileType
        final au.org.ala.biocache.RecordWriter rw = downloadParams.getFileType().equals("csv")
                ? new CSVRecordWriter(out, header, downloadParams.getSep(), downloadParams.getEsc())
                : new ShapeFileRecordWriter(downloadParams.getFile(), out,
                        (String[]) ArrayUtils.addAll(fields, qaFields));

        if (rw instanceof ShapeFileRecordWriter) {
            dd.setHeaderMap(((ShapeFileRecordWriter) rw).getHeaderMappings());
        }

        //order the query by _docid_ for faster paging
        solrQuery.addSortField("_docid_", ORDER.asc);

        //for each month create a separate query that pages through 500 records per page
        List<SolrQuery> queries = new ArrayList<SolrQuery>();
        if (splitByFacet != null) {
            for (Count facet : splitByFacet) {
                if (facet.getCount() > 0) {
                    SolrQuery splitByFacetQuery = solrQuery.getCopy()
                            .addFilterQuery(facet.getFacetField().getName() + ":" + facet.getName());
                    splitByFacetQuery.setFacet(false);
                    queries.add(splitByFacetQuery);
                }
            }
            SolrQuery remainderQuery = solrQuery.getCopy()
                    .addFilterQuery("-" + splitByFacet.get(0).getFacetField().getName() + ":[* TO *]");
            queries.add(0, remainderQuery);
        } else {
            queries.add(0, solrQuery);
        }

        //multi-thread the requests...
        ExecutorService pool = Executors.newFixedThreadPool(6);
        Set<Future<Integer>> futures = new HashSet<Future<Integer>>();
        final AtomicInteger resultsCount = new AtomicInteger(0);
        final boolean threadCheckLimit = checkLimit;

        //execute each query, writing the results to stream
        for (final SolrQuery splitByFacetQuery : queries) {
            //define a thread
            Callable<Integer> solrCallable = new Callable<Integer>() {

                int startIndex = 0;

                @Override
                public Integer call() throws Exception {
                    QueryResponse qr = runSolrQuery(splitByFacetQuery, downloadParams.getFq(),
                            downloadBatchSize, startIndex, "_docid_", "asc");
                    int recordsForThread = 0;
                    logger.debug(splitByFacetQuery.getQuery() + " - results: " + qr.getResults().size());

                    while (qr != null && !qr.getResults().isEmpty()) {
                        logger.debug("Start index: " + startIndex + ", " + splitByFacetQuery.getQuery());
                        int count = 0;
                        synchronized (rw) {
                            count = processQueryResults(uidStats, fields, qaFields, rw, qr, dd,
                                    threadCheckLimit, resultsCount);
                            recordsForThread += count;
                        }
                        startIndex += downloadBatchSize;
                        //the filter query was set when the query was first constructed; rerun with the same params but a different startIndex
                        if (!threadCheckLimit || resultsCount.intValue() < MAX_DOWNLOAD_SIZE) {
                            if (!threadCheckLimit) {
                                //throttle the download by sleeping
                                try {
                                    Thread.sleep(throttle);
                                } catch (InterruptedException e) {
                                    //don't care if the sleep was interrupted
                                }
                            }
                            qr = runSolrQuery(splitByFacetQuery, null, downloadBatchSize, startIndex, "_docid_",
                                    "asc");
                        } else {
                            qr = null;
                        }
                    }
                    return recordsForThread;
                }
            };
            futures.add(pool.submit(solrCallable));
        }

        //check the futures until all have finished
        int totalDownload = 0;
        Set<Future<Integer>> completeFutures = new HashSet<Future<Integer>>();
        boolean allComplete = false;
        while (!allComplete) {
            for (Future future : futures) {
                if (!completeFutures.contains(future)) {
                    if (future.isDone()) {
                        totalDownload += (Integer) future.get();
                        completeFutures.add(future);
                    }
                }
            }
            allComplete = completeFutures.size() == futures.size();
            if (!allComplete) {
                Thread.sleep(1000);
            }
        }
        pool.shutdown();
        rw.finalise();
        out.flush();

        long finish = System.currentTimeMillis();
        long timeTakenInSecs = (finish - start) / 1000;
        if (timeTakenInSecs == 0)
            timeTakenInSecs = 1;
        logger.info("Download of " + resultsCount + " records in " + timeTakenInSecs + " seconds. Record/sec: "
                + resultsCount.intValue() / timeTakenInSecs);

    } catch (SolrServerException ex) {
        logger.error("Problem communicating with SOLR server while processing download. " + ex.getMessage(),
                ex);
    }
    return uidStats;
}
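
Worth noting: this method mixes the two library generations. The header concatenation calls org.apache.commons.lang3.ArrayUtils.addAll, whose generic signature <T> T[] addAll(T[] array1, T... array2) returns a String[] directly, while the shapefile branch uses the lang 2.x Object[] overload and therefore needs the (String[]) cast. With illustrative arrays:

String[] indexHeaders = { "scientificName", "decimalLatitude" };
String[] qaTitles = { "taxonomicIssue" };
// lang3: generic signature, no cast needed
String[] header = org.apache.commons.lang3.ArrayUtils.addAll(indexHeaders, qaTitles);
// lang 2.x: Object[] overload, cast required (safe: the result's type follows indexHeaders)
String[] fields = (String[]) org.apache.commons.lang.ArrayUtils.addAll(indexHeaders, qaTitles);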

From source file:com.rapidminer.operator.preprocessing.transformation.aggregation.AggregationOperator.java

@Override
public OperatorVersion[] getIncompatibleVersionChanges() {
    return (OperatorVersion[]) ArrayUtils.addAll(super.getIncompatibleVersionChanges(),
            new OperatorVersion[] { VERSION_5_1_6, VERSION_5_2_8, VERSION_6_0_6 });
}

From source file:com.oneops.inductor.WorkOrderExecutor.java

/**
 * Installs base software needed for chef / oneops
 *
 * @param pr      ProcessRunner
 * @param wo      CmsWorkOrderSimple
 * @param host    remote host
 * @param port    remote port
 * @param logKey
 * @param keyFile
 */
public void runBaseInstall(ProcessRunner pr, CmsWorkOrderSimple wo, String host, String port, String logKey,
        String keyFile) {

    long t1 = System.currentTimeMillis();
    // amazon public images use ubuntu user for ubuntu os
    String cloudName = wo.getCloud().getCiName();
    String osType = "";
    if (wo.getPayLoad().containsKey("DependsOn")
            && wo.getPayLoad().get("DependsOn").get(0).getCiClassName().contains("Compute"))
        osType = wo.getPayLoad().get("DependsOn").get(0).getCiAttributes().get("ostype");
    else
        osType = wo.getRfcCi().getCiAttributes().get("ostype");

    if (osType.equals("default-cloud")) {

        if (!wo.getServices().containsKey("compute")) {
            wo.setComments("missing compute service");
            return;
        }

        osType = wo.getServices().get("compute").get(cloudName).getCiAttributes().get("ostype");
        logger.info("using default-cloud ostype: " + osType);
    }
    String user = getUserForOsAndCloud(osType, wo);

    String sudo = "";
    if (!user.equals("root"))
        sudo = "sudo ";

    String setup = "";

    // rackspace images don't have rsync installed
    if (wo.getCloud().getCiName().indexOf("rackspace") > -1) {
        setup = "yum -d0 -e0 -y install rsync; apt-get -y install rsync; true; ";
        // fedora in aws needs it too
    } else if (osType.indexOf("edora") > -1) {
        setup = "sudo yum -d0 -e0 -y install rsync; ";
    }

    // make prerequisite dirs for /opt/oneops and cookbooks
    String prepCmdline = setup + sudo + "mkdir -p /opt/oneops/workorder /home/" + user + "/components" + ";"
            + sudo + "chown -R " + user + ":" + user + " /opt/oneops;" + sudo + "chown -R " + user + ":" + user
            + " /home/" + user + "/components";

    // double -t args are needed
    String[] cmd = (String[]) ArrayUtils.addAll(sshInteractiveCmdLine,
            new String[] { keyFile, "-p " + port, user + "@" + host, prepCmdline });

    // retry initial ssh 10x for slow hypervisor hosts
    if (!host.equals(TEST_HOST)) {
        ProcessResult result = pr.executeProcessRetry(cmd, logKey, 10);
        if (result.getResultCode() > 0) {
            wo.setComments("failed : can't:" + prepCmdline);
            return;
        }
    }

    // install os package repos - repo_map keyed by os
    ArrayList<String> repoCmdList = new ArrayList<String>();
    if (wo.getServices().containsKey("compute")
            && wo.getServices().get("compute").get(cloudName).getCiAttributes().containsKey("repo_map")
            && wo.getServices().get("compute").get(cloudName).getCiAttributes().get("repo_map")
                    .indexOf(osType) > 0) {

        String repoMap = wo.getServices().get("compute").get(cloudName).getCiAttributes().get("repo_map");
        repoCmdList = getRepoListFromMap(repoMap, osType);
    } else {
        logger.warn("no key in repo_map for os: " + osType);
    }

    // add repo_list from compute
    if (wo.getRfcCi().getCiAttributes().containsKey("repo_list")) {
        repoCmdList.addAll(getRepoList(wo.getRfcCi().getCiAttributes().get("repo_list")));
    }

    if (repoCmdList.size() > 0) {

        String[] cmdTmp = (String[]) ArrayUtils.addAll(sshInteractiveCmdLine,
                new String[] { keyFile, "-p " + port, user + "@" + host });

        // add ";" to each cmd
        for (int i = 0; i < repoCmdList.size(); i++) {
            repoCmdList.set(i, repoCmdList.get(i) + "; ");
        }

        // add in front so env vars can be set before the repo cmds
        repoCmdList.add(0, getProxyEnvVars(wo));

        cmd = (String[]) ArrayUtils.addAll(cmdTmp, repoCmdList.toArray());
        if (!host.equals(TEST_HOST)) {
            ProcessResult result = pr.executeProcessRetry(cmd, logKey, retryCount);
            if (result.getResultCode() > 0) {
                wo.setComments("failed : Replace the compute and retry the deployment");
                wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
                return;
            }
        }
    }

    // put ci cookbooks. "/" needed to get around symlinks
    String cookbookPath = getCookbookPath(wo.getRfcCi().getCiClassName());
    String cookbook = config.getCircuitDir().replace("packer", cookbookPath) + "/";
    String[] rsyncCmdLineWithKey = rsyncCmdLine.clone();
    rsyncCmdLineWithKey[4] += "-p " + port + " -qi " + keyFile;
    String[] deploy = (String[]) ArrayUtils.addAll(rsyncCmdLineWithKey,
            new String[] { cookbook, user + "@" + host + ":/home/" + user + "/" + cookbookPath });
    if (!host.equals(TEST_HOST)) {
        ProcessResult result = pr.executeProcessRetry(deploy, logKey, retryCount);
        if (result.getResultCode() > 0) {
            wo.setComments("FATAL: " + generateRsyncErrorMessage(result.getResultCode(), host + ":" + port));
            wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
            return;
        }
    }

    // put shared cookbooks
    cookbook = config.getCircuitDir().replace("packer", "shared") + "/";
    rsyncCmdLineWithKey = rsyncCmdLine.clone();
    rsyncCmdLineWithKey[4] += "-p " + port + " -qi " + keyFile;
    deploy = (String[]) ArrayUtils.addAll(rsyncCmdLineWithKey,
            new String[] { cookbook, user + "@" + host + ":/home/" + user + "/shared" });
    if (!host.equals(TEST_HOST)) {
        ProcessResult result = pr.executeProcessRetry(deploy, logKey, retryCount);
        if (result.getResultCode() > 0) {
            wo.setComments("FATAL: " + generateRsyncErrorMessage(result.getResultCode(), host + ":" + port));
            wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
            return;
        }
    }

    // install base: oneops user, ruby, chef
    // double -t args are needed
    String[] classParts = wo.getRfcCi().getCiClassName().split("\\.");
    String baseComponent = classParts[classParts.length - 1].toLowerCase();
    String[] cmdTmp = (String[]) ArrayUtils.addAll(sshInteractiveCmdLine,
            new String[] { keyFile, "-p " + port, user + "@" + host, sudo + "/home/" + user + "/" + cookbookPath
                    + "/components/cookbooks/" + baseComponent + "/files/default/install_base.sh" });

    String[] proxyList = new String[] { getProxyBashVars(wo) };
    cmd = (String[]) ArrayUtils.addAll(cmdTmp, proxyList);

    if (!host.equals(TEST_HOST)) {
        ProcessResult result = pr.executeProcessRetry(cmd, logKey, retryCount);
        if (result.getResultCode() > 0) {
            wo.setComments("failed : can't run install_base.sh");
            wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
            return;
        }
    }
    wo.putSearchTag(BASE_INSTALL_TIME, Long.toString(System.currentTimeMillis() - t1));
}
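
The ssh command construction repeated throughout this class, a fixed interactive prefix plus per-call key file, port, target, and remote command, could be captured in one helper. A sketch with illustrative names (sshInteractiveCmdLine is the field used in the source):

static String[] sshCmd(String[] sshPrefix, String keyFile, String port,
        String userAtHost, String remoteCmd) {
    // lang 2.x addAll(Object[], Object[]) allocates the joined array with the
    // component type of sshPrefix (String[]), so the cast is safe.
    return (String[]) ArrayUtils.addAll(sshPrefix,
            new String[] { keyFile, "-p " + port, userAtHost, remoteCmd });
}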