Example usage for java.nio.file Files readAllLines

Introduction

This page collects usage examples for the java.nio.file method Files.readAllLines.

Prototype

public static List<String> readAllLines(Path path, Charset cs) throws IOException 

Document

Reads all lines from a file, decoding bytes to characters with the specified charset, and returns them as a List<String>. The file is closed once all lines have been read or an I/O error occurs.
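As a minimal sketch (the path data/notes.txt is a hypothetical placeholder), the method can be called with an explicit charset as in the prototype; since Java 8 the single-argument overload readAllLines(Path) decodes the file as UTF-8:

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;

public class ReadAllLinesSketch {
    public static void main(String[] args) throws IOException {
        Path path = Paths.get("data/notes.txt"); // hypothetical input file

        // Explicit charset, matching the prototype above
        List<String> lines = Files.readAllLines(path, StandardCharsets.UTF_8);

        // Java 8+: the single-argument overload defaults to UTF-8
        List<String> sameLines = Files.readAllLines(path);

        lines.forEach(System.out::println);
    }
}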

Usage

From source file:Test.java
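Reads /home/docs/users.txt with the platform default charset and prints each line.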

public static void main(String[] args) throws IOException {
    Path path = Paths.get("/home/docs/users.txt");
    List<String> contents = Files.readAllLines(path, Charset.defaultCharset());
    for (String b : contents) {
        System.out.println(b);
    }

}

From source file:Main.java
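Reads test1.txt as US-ASCII and prints each line.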

public static void main(String[] args) throws Exception {
    Charset cs = Charset.forName("US-ASCII");
    Path source = Paths.get("test1.txt");

    List<String> lines = Files.readAllLines(source, cs);
    for (String line : lines) {
        System.out.println(line);
    }
}

From source file:TestReadCustData.java
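Parses a '~'-delimited customer export (customer.txt, ISO-8859-1) into Customer beans, updates matching records via Ebean, and prints a short summary of each customer.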

public static void main(String[] args) {
    AgentLoader.loadAgentFromClasspath("avaje-ebeanorm-agent", "debug=1");
    List<Customer> lCust = new ArrayList<>();
    try {
        List<String> s = Files.readAllLines(Paths.get("customer.txt"), Charsets.ISO_8859_1);

        for (String s1 : s) {
            if (!s1.startsWith("GRUP")) {
                Customer c = new Customer();
                try {
                    c.setId(Long.parseLong(s1.split("~")[3]));
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    // The original code retried the same parse here, which would throw again;
                    // fall back to a default id when the field is missing
                    c.setId(0L);
                }
                try {
                    c.setNama(s1.split("~")[4]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setNama("");
                }
                try {
                    c.setShipto(s1.split("~")[9]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setShipto("");
                }
                try {
                    c.setKota(s1.split("~")[12]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setKota("");
                }
                try {
                    c.setProvinsi(s1.split("~")[13]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setProvinsi("");
                }
                try {
                    c.setKodePos(s1.split("~")[14]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setKodePos("");
                }
                try {
                    c.setNamaArea(s1.split("~")[2]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setNamaArea("");
                }
                try {
                    c.setDKLK(s1.split("~")[15]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setDKLK("");
                }
                try {
                    c.setCreditLimit(Long.parseLong(s1.split("~")[16]));
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setCreditLimit(0L);
                }
                try {
                    c.setNpwp(s1.split("~")[11]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setNpwp("");
                }
                try {
                    c.setNamaWajibPajak(s1.split("~")[10]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setNamaWajibPajak("");
                }
                try {
                    c.setCreationDate(s1.split("~")[6]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setCreationDate("");
                }
                try {
                    c.setLastUpdateBy(s1.split("~")[17]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setLastUpdateBy("");
                }
                try {
                    c.setLastUpdateDate(s1.split("~")[18]);
                } catch (ArrayIndexOutOfBoundsException arrex) {
                    c.setLastUpdateDate("");
                }

                lCust.add(c);
            }

        }

        for (Customer c : lCust) {

            Customer cc = Ebean.find(Customer.class, c.getId());
            if (cc != null) {
                // An existing record with this id was found; update it with the parsed data
                Ebean.update(c);
            }

            System.out.print(c.getId());
            System.out.print(" | ");
            System.out.print(c.getNama());
            System.out.print(" | ");
            System.out.print(c.getShipto());
            System.out.print(" | ");
            System.out.print(c.getNpwp());
            System.out.print(" | ");
            System.out.print(c.getNamaWajibPajak());
            System.out.println();
        }

    } catch (IOException ex) {
        Logger.getLogger(TestReadCustData.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:org.niord.core.ChartConverter.java
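Converts a semicolon-separated chart CSV into SystemChartVo objects with polygon geometry and writes the collection as pretty-printed JSON.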

public static void main(String[] args) throws IOException {

    String csvPath = "/Users/carolus/Downloads/charts.csv";
    String resultPath = "/Users/carolus/Downloads/charts.json";

    List<SystemChartVo> charts = new ArrayList<>();

    Files.readAllLines(Paths.get(csvPath), Charset.forName("UTF-8")).forEach(line -> {
        String[] fields = line.split(";");

        SystemChartVo chart = new SystemChartVo();
        chart.setChartNumber(fields[0].split(" ")[1].trim());
        if (StringUtils.isNotBlank(fields[1]) && StringUtils.isNumeric(fields[1])) {
            chart.setInternationalNumber(Integer.valueOf(fields[1]));
        }

        chart.setName(StringUtils.defaultIfBlank(fields[3], ""));

        if (StringUtils.isNotBlank(fields[4]) && StringUtils.isNumeric(fields[4])) {
            chart.setScale(Integer.valueOf(fields[4]));
        }

        if (!"Ukendt / Unknown".equals(fields[5])) {
            chart.setHorizontalDatum(StringUtils.defaultIfBlank(fields[5], ""));
        }

        Double south = parsePos(fields[6]);
        Double west = -parsePos(fields[7]);
        Double north = parsePos(fields[8]);
        Double east = -parsePos(fields[9]);

        double[][] coords = new double[][] { { east, north }, { east, south }, { west, south }, { west, north },
                { east, north }, };
        PolygonVo geometry = new PolygonVo();
        geometry.setCoordinates(new double[][][] { coords });
        GeoJsonUtils.roundCoordinates(geometry, 8);
        chart.setGeometry(geometry);

        charts.add(chart);
    });

    ObjectMapper mapper = new ObjectMapper();

    System.out.println(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(charts));

    try (BufferedWriter writer = Files.newBufferedWriter(Paths.get(resultPath), StandardCharsets.UTF_8)) {
        //writer.write("\uFEFF");
        writer.write(mapper.writerWithDefaultPrettyPrinter().writeValueAsString(charts));
    }
}

From source file:org.apache.kylin.engine.streaming.diagnose.StreamingLogAnalyzer.java
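Scans a folder of streaming log files, extracts the first and last timestamps in each file, and reports the average elapsed time along with a count of files that could not be parsed.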

public static void main(String[] args) {
    int errorFileCount = 0;
    List<Long> ellapsedTimes = Lists.newArrayList();

    String patternStr = "(\\d{2}/\\d{2}/\\d{2} \\d{2}:\\d{2}:\\d{2})";
    Pattern pattern = Pattern.compile(patternStr);

    SimpleDateFormat format = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
    format.setTimeZone(TimeZone.getTimeZone("GMT")); // NOTE: this must be GMT to calculate epoch date correctly

    Preconditions.checkArgument(args.length == 1, "Usage: StreamingLogsAnalyser streaming_logs_folder");
    for (File file : FileUtils.listFiles(new File(args[0]), new String[] { "log" }, false)) {
        System.out.println("Processing file " + file.toString());

        long startTime = 0;
        long endTime = 0;
        try {
            List<String> contents = Files.readAllLines(file.toPath(), Charset.defaultCharset());
            for (int i = 0; i < contents.size(); ++i) {
                Matcher m = pattern.matcher(contents.get(i));
                if (m.find()) {
                    startTime = format.parse("20" + m.group(1)).getTime();
                    break;
                }
            }

            for (int i = contents.size() - 1; i >= 0; --i) {
                Matcher m = pattern.matcher(contents.get(i));
                if (m.find()) {
                    endTime = format.parse("20" + m.group(1)).getTime();
                    break;
                }
            }

            if (startTime == 0 || endTime == 0) {
                throw new RuntimeException("start time or end time is not found");
            }

            if (endTime - startTime < 60000) {
                System.out.println("Warning: this job took less than one minute!!!! " + file.toString());
            }

            ellapsedTimes.add(endTime - startTime);

        } catch (Exception e) {
            System.out.println("Exception when processing log file " + file.toString());
            System.out.println(e);
            errorFileCount++;
        }
    }

    System.out.println("Totally error files count " + errorFileCount);
    System.out.println("Totally normal files processed " + ellapsedTimes.size());

    long sum = 0;
    for (Long x : ellapsedTimes) {
        sum += x;
    }
    System.out.println("Avg build time " + (sum / ellapsedTimes.size()));
}

From source file:org.apache.storm.sql.StormSqlRunner.java
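Reads SQL statements from a file (UTF-8) and either explains them or submits them as a Storm topology, depending on the command-line options.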

public static void main(String[] args) throws Exception {
    Options options = buildOptions();
    CommandLineParser parser = new DefaultParser();
    CommandLine commandLine = parser.parse(options, args);

    if (!commandLine.hasOption(OPTION_SQL_FILE_LONG)) {
        printUsageAndExit(options, OPTION_SQL_FILE_LONG + " is required");
    }

    String filePath = commandLine.getOptionValue(OPTION_SQL_FILE_LONG);
    List<String> stmts = Files.readAllLines(Paths.get(filePath), StandardCharsets.UTF_8);
    StormSql sql = StormSql.construct();
    @SuppressWarnings("unchecked")
    Map<String, Object> conf = Utils.readStormConfig();

    if (commandLine.hasOption(OPTION_SQL_EXPLAIN_LONG)) {
        sql.explain(stmts);
    } else if (commandLine.hasOption(OPTION_SQL_TOPOLOGY_NAME_LONG)) {
        String topoName = commandLine.getOptionValue(OPTION_SQL_TOPOLOGY_NAME_LONG);
        SubmitOptions submitOptions = new SubmitOptions(TopologyInitialStatus.ACTIVE);
        sql.submit(topoName, stmts, conf, submitOptions, null, null);
    } else {
        printUsageAndExit(options, "Either " + OPTION_SQL_TOPOLOGY_NAME_LONG + " or " + OPTION_SQL_EXPLAIN_LONG
                + " must be presented");
    }
}

From source file:org.languagetool.dev.bigdata.AutomaticConfusionRuleEvaluatorFilter.java
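Filters the output of AutomaticConfusionRuleEvaluator, skipping word pairs with low precision or too few occurrences and printing the remaining candidates.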

public static void main(String[] args) throws IOException {
    if (args.length != 1) {
        System.out.println("Usage: " + AutomaticConfusionRuleEvaluatorFilter.class.getSimpleName() + " <file>");
        System.out.println("       <file> is the output of " + AutomaticConfusionRuleEvaluator.class.getName());
        System.exit(0);
    }
    List<String> lines = Files.readAllLines(Paths.get(args[0]), Charset.forName("utf-8"));
    String prevKey = null;
    int skippedCount = 0;
    int lowPrecisionCount = 0;
    int lowOccurrenceCount = 0;
    int usedCount = 0;
    boolean skipping = false;
    for (String line : lines) {
        if (!line.startsWith("=>")) {
            continue;
        }
        String[] parts = line.replaceFirst("=> ", "").replaceFirst("; \\d.*", "").split("; ");
        String key = parts[0] + ";" + parts[1];
        Pattern data = Pattern.compile("^(.+?); (.+?);.*p=(\\d\\.\\d+), r=(\\d\\.\\d+), (\\d+)\\+(\\d+),.*");
        Matcher m = data.matcher(line.replaceFirst("=> ", ""));
        m.find();
        String word1 = m.group(1);
        String word2 = m.group(2);
        String wordGroup = word1 + "; " + word2;
        if (word1.compareTo(word2) > 0) {
            wordGroup = word2 + "; " + word1;
        }
        float precision = Float.parseFloat(m.group(3));
        int occ1 = Integer.parseInt(m.group(5));
        int occ2 = Integer.parseInt(m.group(6));
        if (prevKey != null && key.equals(prevKey)) {
            if (skipping) {
                //System.out.println("SKIP: " + reformat(line));
            }
        } else {
            if (precision < MIN_PRECISION) {
                lowPrecisionCount++;
                skippedCount++;
                skipping = true;
                continue;
            }
            if (occ1 < MIN_OCCURRENCES || occ2 < MIN_OCCURRENCES) {
                lowOccurrenceCount++;
                skippedCount++;
                skipping = true;
                continue;
            }
            System.out.println(reformat(line.replaceFirst("=> .+?; .+?; ", wordGroup + "; ")));
            skipping = false;
            usedCount++;
        }
        prevKey = key;
    }
    System.err.println("Skipped: " + skippedCount);
    System.err.println("lowPrecisionCount: " + lowPrecisionCount);
    System.err.println("lowOccurrences: " + lowOccurrenceCount);
    System.err.println("Used: " + usedCount);
}

From source file:es.upm.oeg.tools.rdfshapes.utils.CadinalityResultGenerator.java
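Reads a list of RDF classes, queries a SPARQL endpoint for per-property cardinality statistics, and writes the results to an Excel workbook.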

public static void main(String[] args) throws Exception {

    String endpoint = "http://3cixty.eurecom.fr/sparql";

    List<String> classList = Files.readAllLines(Paths.get(classListPath), Charset.defaultCharset());

    String classPropertyQueryString = readFile(classPropertyQueryPath, Charset.defaultCharset());
    String propertyCardinalityQueryString = readFile(propertyCardinalityQueryPath, Charset.defaultCharset());
    String individualCountQueryString = readFile(individualCountQueryPath, Charset.defaultCharset());

    DecimalFormat df = new DecimalFormat("0.0000");

    //Create the Excel workbook and sheet
    XSSFWorkbook wb = new XSSFWorkbook();
    XSSFSheet sheet = wb.createSheet("Cardinality");

    int currentExcelRow = 0;
    int classStartRow = 0;

    for (String clazz : classList) {

        Map<String, String> litMap = new HashMap<>();
        Map<String, String> iriMap = ImmutableMap.of("class", clazz);

        String queryString = bindQueryString(individualCountQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        int individualCount;
        List<RDFNode> c = executeQueryForList(queryString, endpoint, "c");
        if (c.size() == 1) {
            individualCount = c.get(0).asLiteral().getInt();
        } else {
            continue;
        }

        // If there are zero individuals, the class list and the endpoint do not match
        if (individualCount == 0) {
            throw new IllegalStateException("Check whether " + classListPath + " and " + endpoint + " match.");
        }

        //            System.out.println("***");
        //            System.out.println("### **" + clazz + "** (" + individualCount + ")");
        //            System.out.println("***");
        //            System.out.println();

        classStartRow = currentExcelRow;
        XSSFRow row = sheet.createRow(currentExcelRow);
        XSSFCell cell = row.createCell(0);
        cell.setCellValue(clazz);
        cell.getCellStyle().setAlignment(CellStyle.ALIGN_CENTER);

        queryString = bindQueryString(classPropertyQueryString,
                ImmutableMap.of(IRI_BINDINGS, iriMap, LITERAL_BINDINGS, litMap));

        List<RDFNode> nodeList = executeQueryForList(queryString, endpoint, "p");

        for (RDFNode property : nodeList) {
            if (property.isURIResource()) {

                DescriptiveStatistics stats = new DescriptiveStatistics();

                String propertyURI = property.asResource().getURI();
                //                    System.out.println("* " + propertyURI);
                //                    System.out.println();

                XSSFRow propertyRow = sheet.getRow(currentExcelRow);
                if (propertyRow == null) {
                    propertyRow = sheet.createRow(currentExcelRow);
                }
                currentExcelRow++;

                XSSFCell propertyCell = propertyRow.createCell(1);
                propertyCell.setCellValue(propertyURI);

                Map<String, String> litMap2 = new HashMap<>();
                Map<String, String> iriMap2 = ImmutableMap.of("class", clazz, "p", propertyURI);

                queryString = bindQueryString(propertyCardinalityQueryString,
                        ImmutableMap.of(IRI_BINDINGS, iriMap2, LITERAL_BINDINGS, litMap2));

                List<Map<String, RDFNode>> solnMaps = executeQueryForList(queryString, endpoint,
                        ImmutableSet.of("card", "count"));

                int sum = 0;
                List<CardinalityCount> cardinalityList = new ArrayList<>();
                if (solnMaps.size() > 0) {

                    for (Map<String, RDFNode> soln : solnMaps) {
                        int count = soln.get("count").asLiteral().getInt();
                        int card = soln.get("card").asLiteral().getInt();

                        for (int i = 0; i < count; i++) {
                            stats.addValue(card);
                        }

                        CardinalityCount cardinalityCount = new CardinalityCount(card, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                        sum += count;
                    }

                    // Check for zero cardinality instances
                    int count = individualCount - sum;
                    if (count > 0) {
                        for (int i = 0; i < count; i++) {
                            stats.addValue(0);
                        }
                        CardinalityCount cardinalityCount = new CardinalityCount(0, count,
                                (((double) count) / individualCount) * 100);
                        cardinalityList.add(cardinalityCount);
                    }
                }

                Map<Integer, Double> cardMap = new HashMap<>();
                for (CardinalityCount count : cardinalityList) {
                    cardMap.put(count.getCardinality(), count.getPrecentage());
                }

                XSSFCell instanceCountCell = propertyRow.createCell(2);
                instanceCountCell.setCellValue(individualCount);

                XSSFCell minCell = propertyRow.createCell(3);
                minCell.setCellValue(stats.getMin());

                XSSFCell maxCell = propertyRow.createCell(4);
                maxCell.setCellValue(stats.getMax());

                XSSFCell p1 = propertyRow.createCell(5);
                p1.setCellValue(stats.getPercentile(1));

                XSSFCell p99 = propertyRow.createCell(6);
                p99.setCellValue(stats.getPercentile(99));

                XSSFCell mean = propertyRow.createCell(7);
                mean.setCellValue(df.format(stats.getMean()));

                for (int i = 0; i < 21; i++) {
                    XSSFCell dataCell = propertyRow.createCell(8 + i);
                    Double percentage = cardMap.get(i);
                    if (percentage != null) {
                        dataCell.setCellValue(df.format(percentage));
                    } else {
                        dataCell.setCellValue(0);
                    }
                }

                //                    System.out.println("| Min Card. |Max Card. |");
                //                    System.out.println("|---|---|");
                //                    System.out.println("| ? | ? |");
                //                    System.out.println();

            }
        }

        //System.out.println("class start: " + classStartRow + ", class end: " + (currentExcelRow -1));
        //We have finished writing the properties of one class; now it's time to merge the cells
        int classEndRow = currentExcelRow - 1;
        if (classStartRow < classEndRow) {
            sheet.addMergedRegion(new CellRangeAddress(classStartRow, classEndRow, 0, 0));
        }

    }

    String filename = "3cixty.xls";
    FileOutputStream fileOut = new FileOutputStream(filename);
    wb.write(fileOut);
    fileOut.close();
}

From source file:com.mapr.PurchaseLog.java
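Generates synthetic user profiles from a schema, reads them back with readAllLines, and simulates a clickstream session per user, writing tab-separated purchase-log records.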

public static void main(String[] args) throws IOException {
    Options opts = new Options();
    CmdLineParser parser = new CmdLineParser(opts);
    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println("Usage: -count <number>G|M|K [ -users number ]  log-file user-profiles");
        return;
    }

    Joiner withTab = Joiner.on("\t");

    // first generate lots of user definitions
    SchemaSampler users = new SchemaSampler(
            Resources.asCharSource(Resources.getResource("user-schema.txt"), Charsets.UTF_8).read());
    File userFile = File.createTempFile("user", "tsv");
    BufferedWriter out = Files.newBufferedWriter(userFile.toPath(), Charsets.UTF_8);
    for (int i = 0; i < opts.users; i++) {
        out.write(withTab.join(users.sample()));
        out.newLine();
    }
    out.close();

    // now generate a session for each user
    Splitter onTabs = Splitter.on("\t");
    Splitter onComma = Splitter.on(",");

    Random gen = new Random();
    SchemaSampler intermediate = new SchemaSampler(
            Resources.asCharSource(Resources.getResource("hit_step.txt"), Charsets.UTF_8).read());

    final int COUNTRY = users.getFieldNames().indexOf("country");
    final int CAMPAIGN = intermediate.getFieldNames().indexOf("campaign_list");
    final int SEARCH_TERMS = intermediate.getFieldNames().indexOf("search_keywords");
    Preconditions.checkState(COUNTRY >= 0, "Need country field in user schema");
    Preconditions.checkState(CAMPAIGN >= 0, "Need campaign_list field in step schema");
    Preconditions.checkState(SEARCH_TERMS >= 0, "Need search_keywords field in step schema");

    out = Files.newBufferedWriter(new File(opts.out).toPath(), Charsets.UTF_8);

    for (String line : Files.readAllLines(userFile.toPath(), Charsets.UTF_8)) {
        long t = (long) (TimeUnit.MILLISECONDS.convert(30, TimeUnit.DAYS) * gen.nextDouble());
        List<String> user = Lists.newArrayList(onTabs.split(line));

        // pick session length
        int n = (int) Math.floor(-30 * Math.log(gen.nextDouble()));

        for (int i = 0; i < n; i++) {
            // time on page
            int dt = (int) Math.floor(-20000 * Math.log(gen.nextDouble()));
            t += dt;

            // hit specific values
            JsonNode step = intermediate.sample();

            // check for purchase
            double p = 0.01;
            List<String> campaigns = Lists.newArrayList(onComma.split(step.get("campaign_list").asText()));
            List<String> keywords = Lists.newArrayList(onComma.split(step.get("search_keywords").asText()));
            if ((user.get(COUNTRY).equals("us") && campaigns.contains("5"))
                    || (user.get(COUNTRY).equals("jp") && campaigns.contains("7")) || keywords.contains("homer")
                    || keywords.contains("simpson")) {
                p = 0.5;
            }

            String events = gen.nextDouble() < p ? "1" : "-";

            out.write(Long.toString(t));
            out.write("\t");
            out.write(line);
            out.write("\t");
            out.write(withTab.join(step));
            out.write("\t");
            out.write(events);
            out.write("\n");
        }
    }
    out.close();
}

From source file:org.mitre.mpf.app.ComponentRegistrationApp.java

/**
 * Register one or more components. Updates the Algorithms.xml, Actions.xml, Tasks.xml, Pipelines.xml,
 * nodeServicesPalette.json, and nodeManagerConfig.xml files.
 *
 * @param args args[0] contains the path to the cpp component list;
 *             args[1] contains the number of services per component that should be configured
 *             args[2] contains the node manager hostname to use in nodeManagerConfig.xml
 *             args[3] (optional) contains the string of user-specified components
 */
public static void main(String[] args) {

    // NOTE: "-DcppComponents=<blank>" is the same as not providing the option

    if (args.length != 3 && args.length != 4) {
        System.err.println("Usage: java " + ComponentRegistrationApp.class.getSimpleName()
                + " component-list-file num-services-per-component node-manager-hostname [\"componentA,componentB,componentC,...\"]");
        System.exit(-1);
    }

    String componentListPath = args[0];
    if (!Files.exists(Paths.get(componentListPath))) {
        System.err.println("Cannot read: " + componentListPath);
        System.exit(-1);
    }

    List<String> componentPaths = null;
    try {
        componentPaths = Files.readAllLines(Paths.get(componentListPath), StandardCharsets.UTF_8);
    } catch (IOException e) {
        e.printStackTrace();
        System.exit(-1);
    }

    int numServicesPerComponent = 1;
    try {
        numServicesPerComponent = Integer.parseInt(args[1]);
    } catch (NumberFormatException e) {
        e.printStackTrace();
        System.exit(-1);
    }

    String nodeManagerHostname = args[2];

    if (args.length == 4) {
        String componentsSpecified = args[3];
        List<String> componentsSpecifiedList = Arrays.asList(componentsSpecified.split(","));

        List<String> componentsSpecifiedInFileList = componentPaths.stream()
                .map(c -> FilenameUtils.getBaseName(c)).collect(Collectors.toList());

        // sanity check
        if (!componentsSpecifiedInFileList.containsAll(componentsSpecifiedList)) {
            System.err.println(
                    "The specified components " + componentsSpecifiedList + " are not a subset of those in "
                            + componentListPath + " " + componentsSpecifiedInFileList + ".");
            System.err.println("Do a full MPF clean and build.");
            System.exit(-1);
        }

        // filter out components that were built, but should not be registered
        componentPaths = componentPaths.stream()
                .filter(c -> componentsSpecifiedList.contains(FilenameUtils.getBaseName(c)))
                .collect(Collectors.toList());
    }

    // performance optimization: load the application context once
    ApplicationContext context = new ClassPathXmlApplicationContext("applicationContext-minimal.xml");
    AutowireCapableBeanFactory beanFactory = context.getAutowireCapableBeanFactory();

    AddComponentService addComponentService = context.getBean(AddComponentService.class);
    beanFactory.autowireBean(addComponentService);

    NodeManagerService nodeManagerService = context.getBean(NodeManagerService.class);
    beanFactory.autowireBean(nodeManagerService);

    for (String componentPath : componentPaths) {

        // TODO: Handle caffe in the same way as the other components, then remove this check.
        // TODO: Ansible should prompt the user to install each and every component in the deployment package.
        String componentName = FilenameUtils.getBaseName(componentPath);
        if (componentName.equals("caffeComponent")) {
            continue;
        }

        String descriptorPath = componentPath + "/descriptor.json";
        System.out.println("Registering: " + descriptorPath);

        try {
            // update Algorithms.xml, Actions.xml, Tasks.xml, Pipelines.xml, and nodeServicesPalette.json
            addComponentService.registerDeployedComponent(descriptorPath);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(-1); // kill the build if anything goes wrong
        }
    }

    if (numServicesPerComponent > 0) {
        Map<String, ServiceModel> nodeServiceModels = nodeManagerService.getServiceModels();

        for (ServiceModel serviceModel : nodeServiceModels.values()) {
            serviceModel.setServiceCount(numServicesPerComponent);
        }

        NodeManagerModel nodeManagerModel = new NodeManagerModel(nodeManagerHostname);
        nodeManagerModel.setServices(new ArrayList<>(nodeServiceModels.values()));

        List<NodeManagerModel> nodeManagerModels = new ArrayList<NodeManagerModel>();
        nodeManagerModels.add(nodeManagerModel);

        try {
            // update nodeManagerConfig.xml
            nodeManagerService.saveNodeManagerConfig(nodeManagerModels, false); // don't reload NodeManagerStatus
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(-1); // kill the build if anything goes wrong
        }
    }
}