Example usage for java.util.stream Stream close

List of usage examples for java.util.stream Stream close

Introduction

On this page you can find example usage for the java.util.stream Stream.close() method.

Prototype

@Override
void close();

Document

Closes this stream, causing all close handlers for this stream pipeline to be called.
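
Close handlers are registered with onClose and run, in registration order, when close() is invoked. The following minimal sketch (not taken from the sources below) demonstrates the behavior:

import java.util.stream.Stream;

public class CloseHandlerDemo {
    public static void main(String[] args) {
        // onClose registers a handler; close() runs all registered handlers
        // in the order they were added.
        Stream<String> stream = Stream.of("a", "b", "c")
                .onClose(() -> System.out.println("first handler"))
                .onClose(() -> System.out.println("second handler"));

        stream.forEach(System.out::println);
        stream.close(); // prints "first handler", then "second handler"
    }
}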

Usage

From source file:com.baeldung.file.FileOperationsTest.java

@Test
public void givenFilePath_whenUsingFilesLines_thenFileData() throws IOException, URISyntaxException {
    String expectedData = "Hello World from fileTest.txt!!!";

    Path path = Paths.get(getClass().getClassLoader().getResource("fileTest.txt").toURI());

    StringBuilder data = new StringBuilder();
    Stream<String> lines = Files.lines(path);
    lines.forEach(line -> data.append(line).append("\n"));
    lines.close();

    Assert.assertEquals(expectedData, data.toString().trim());
}

From source file:com.baeldung.file.FileOperationsManualTest.java

@Test
public void givenFilePath_whenUsingFilesLines_thenFileData() throws IOException, URISyntaxException {
    String expectedData = "Hello World from fileTest.txt!!!";

    Path path = Paths.get(getClass().getClassLoader().getResource("fileTest.txt").toURI());

    StringBuilder data = new StringBuilder();
    Stream<String> lines = Files.lines(path);
    lines.forEach(line -> data.append(line).append("\n"));
    lines.close();

    assertEquals(expectedData, data.toString().trim());
}
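
Both tests above close the stream explicitly, so close() is skipped if forEach throws. Because Stream implements AutoCloseable, a try-with-resources form (a sketch reusing the path and data variables from the tests, not part of either source file) closes the stream on every path:

try (Stream<String> lines = Files.lines(path)) {
    lines.forEach(line -> data.append(line).append("\n"));
} // close() runs automatically, even if forEach throws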

From source file:com.spotify.heroic.shell.task.DeleteKeys.java

private AsyncFuture<Void> askForOk(final ShellIO io, final Stream<BackendKey> keys) {
    io.out().println("Examples of keys that would have been deleted (use --ok to " + "perform):");

    keys.limit(100).forEach(k -> {
        io.out().println(k.toString());
    });

    keys.close();
    return async.resolved();
}

From source file:net.geoprism.localization.LocaleManager.java

private Collection<Locale> loadCLDRs() {
    try {

        // Get the list of known CLDR locales
        Set<Locale> locales = new HashSet<Locale>();

        Set<String> paths = new HashSet<String>();

        URL resource = this.getClass().getResource("/cldr/main");
        URI uri = resource.toURI();

        if (uri.getScheme().equals("jar")) {
            FileSystem fileSystem = FileSystems.newFileSystem(uri, new HashMap<String, Object>());
            Path path = fileSystem.getPath("/cldr/main");

            Stream<Path> walk = Files.walk(path, 1);

            try {
                for (Iterator<Path> it = walk.iterator(); it.hasNext();) {
                    Path location = it.next();

                    paths.add(location.toAbsolutePath().toString());
                }
            } finally {
                walk.close();
            }
        } else {
            String url = resource.getPath();
            File root = new File(url);

            File[] files = root.listFiles(new DirectoryFilter());

            if (files != null) {
                for (File file : files) {
                    paths.add(file.getAbsolutePath());
                }
            }
        }

        for (String path : paths) {
            File file = new File(path);

            String filename = file.getName();

            locales.add(LocaleManager.getLocaleForName(filename));
        }

        return locales;
    } catch (Exception e) {
        throw new ProgrammingErrorException(e);
    }
}
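
Files.walk holds open directory resources, so its stream must be closed; the try/finally above does that by hand. A try-with-resources equivalent (a sketch reusing the path and paths variables from the method) is more compact:

try (Stream<Path> walk = Files.walk(path, 1)) {
    for (Iterator<Path> it = walk.iterator(); it.hasNext();) {
        paths.add(it.next().toAbsolutePath().toString());
    }
} // close() releases the directory handles, even if iteration throws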

From source file:objective.taskboard.utils.ZipUtils.java

public static void unzip(Stream<ZipStreamEntry> stream, Path output) {
    if (output.toFile().isFile())
        throw new RuntimeException("Output must be a directory");

    try {
        stream.forEach(ze -> {
            Path entryPath = output.resolve(ze.getName());
            try {
                if (ze.isDirectory()) {
                    createDirectories(entryPath);
                } else {
                    createDirectories(entryPath.getParent());
                    copy(ze.getInputStream(), entryPath);
                }
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });
    } finally {
        stream.close();
    }
}

From source file:objective.taskboard.utils.ZipUtils.java

public static void zip(Stream<ZipStreamEntry> stream, OutputStream outputStream) {
    try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) {
        stream.forEach(ze -> {
            ZipEntry newEntry = new ZipEntry(ze.getName());
            try {
                zipOutputStream.putNextEntry(newEntry);
                IOUtils.copy(ze.getInputStream(), zipOutputStream);
            } catch (IOException e) {
                throw new UncheckedIOException(e);
            }
        });
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    } finally {
        stream.close();
    }
}

From source file:org.apache.hadoop.hive.ql.MetaStoreDumpUtility.java

public static void setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(HiveConf conf, String tmpBaseDir) {
    Connection conn = null;

    try {
        Properties props = new Properties(); // connection properties
        props.put("user", conf.get("javax.jdo.option.ConnectionUserName"));
        props.put("password", conf.get("javax.jdo.option.ConnectionPassword"));
        String url = conf.get("javax.jdo.option.ConnectionURL");
        conn = DriverManager.getConnection(url, props);
        ResultSet rs = null;
        Statement s = conn.createStatement();

        if (LOG.isDebugEnabled()) {
            LOG.debug("Connected to metastore database ");
        }

        String mdbPath = HiveTestEnvSetup.HIVE_ROOT + "/data/files/tpcds-perf/metastore_export/";

        // Setup the table column stats
        BufferedReader br = new BufferedReader(new FileReader(new File(
                HiveTestEnvSetup.HIVE_ROOT + "/metastore/scripts/upgrade/derby/022-HIVE-11107.derby.sql")));
        String command;

        s.execute("DROP TABLE APP.TABLE_PARAMS");
        s.execute("DROP TABLE APP.TAB_COL_STATS");
        // Create the column stats table
        while ((command = br.readLine()) != null) {
            if (!command.endsWith(";")) {
                continue;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Going to run command : " + command);
            }
            PreparedStatement psCommand = conn.prepareStatement(command.substring(0, command.length() - 1));
            psCommand.execute();
            psCommand.close();
            if (LOG.isDebugEnabled()) {
                LOG.debug("successfully completed " + command);
            }
        }
        br.close();

        java.nio.file.Path tabColStatsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TAB_COL_STATS.txt.bz2");
        java.nio.file.Path tabParamsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TABLE_PARAMS.txt.bz2");

        // Set up the foreign key constraints properly in the TAB_COL_STATS data
        java.nio.file.Path tmpFileLoc1 = FileSystems.getDefault().getPath(tmpBaseDir, "TAB_COL_STATS.txt");
        java.nio.file.Path tmpFileLoc2 = FileSystems.getDefault().getPath(tmpBaseDir, "TABLE_PARAMS.txt");

        class MyComp implements Comparator<String> {
            @Override
            public int compare(String str1, String str2) {
                if (str2.length() != str1.length()) {
                    return str2.length() - str1.length();
                }
                return str1.compareTo(str2);
            }
        }

        final SortedMap<String, Integer> tableNameToID = new TreeMap<String, Integer>(new MyComp());

        rs = s.executeQuery("SELECT * FROM APP.TBLS");
        while (rs.next()) {
            String tblName = rs.getString("TBL_NAME");
            Integer tblId = rs.getInt("TBL_ID");
            tableNameToID.put(tblName, tblId);

            if (LOG.isDebugEnabled()) {
                LOG.debug("Resultset : " + tblName + " | " + tblId);
            }
        }

        final Map<String, Map<String, String>> data = new HashMap<>();
        rs = s.executeQuery("select TBLS.TBL_NAME, a.COLUMN_NAME, a.TYPE_NAME from  "
                + "(select COLUMN_NAME, TYPE_NAME, SDS.SD_ID from APP.COLUMNS_V2 join APP.SDS on SDS.CD_ID = COLUMNS_V2.CD_ID) a"
                + " join APP.TBLS on  TBLS.SD_ID = a.SD_ID");
        while (rs.next()) {
            String tblName = rs.getString(1);
            String colName = rs.getString(2);
            String typeName = rs.getString(3);
            Map<String, String> cols = data.get(tblName);
            if (null == cols) {
                cols = new HashMap<>();
            }
            cols.put(colName, typeName);
            data.put(tblName, cols);
        }

        BufferedReader reader = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabColStatsCsv, StandardOpenOption.READ))));

        Stream<String> replaced = reader.lines().parallel().map(str -> {
            String[] splits = str.split(",");
            String tblName = splits[0];
            String colName = splits[1];
            Integer tblID = tableNameToID.get(tblName);
            StringBuilder sb = new StringBuilder(
                    "default@" + tblName + "@" + colName + "@" + data.get(tblName).get(colName) + "@");
            for (int i = 2; i < splits.length; i++) {
                sb.append(splits[i] + "@");
            }
            // Add tbl_id and empty bitvector
            return sb.append(tblID).append("@").toString();
        });

        Files.write(tmpFileLoc1, (Iterable<String>) replaced::iterator);
        replaced.close();
        reader.close();

        BufferedReader reader2 = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabParamsCsv, StandardOpenOption.READ))));
        final Map<String, String> colStats = new ConcurrentHashMap<>();
        Stream<String> replacedStream = reader2.lines().parallel().map(str -> {
            String[] splits = str.split("_@");
            String tblName = splits[0];
            Integer tblId = tableNameToID.get(tblName);
            Map<String, String> cols = data.get(tblName);
            StringBuilder sb = new StringBuilder();
            sb.append("{\"COLUMN_STATS\":{");
            for (String colName : cols.keySet()) {
                sb.append("\"" + colName + "\":\"true\",");
            }
            sb.append("},\"BASIC_STATS\":\"true\"}");
            colStats.put(tblId.toString(), sb.toString());

            return tblId.toString() + "@" + splits[1];
        });

        Files.write(tmpFileLoc2, (Iterable<String>) replacedStream::iterator);
        Files.write(tmpFileLoc2,
                (Iterable<String>) colStats.entrySet().stream()
                        .map(map -> map.getKey() + "@COLUMN_STATS_ACCURATE@" + map.getValue())::iterator,
                StandardOpenOption.APPEND);

        replacedStream.close();
        reader2.close();
        // Load the column stats and table params with 30 TB scale
        String importStatement1 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TAB_COL_STATS" + "', '"
                + tmpFileLoc1.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";
        String importStatement2 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TABLE_PARAMS" + "', '"
                + tmpFileLoc2.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";

        PreparedStatement psImport1 = conn.prepareStatement(importStatement1);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement1);
        }
        psImport1.execute();
        psImport1.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement1);
        }
        PreparedStatement psImport2 = conn.prepareStatement(importStatement2);
        if (LOG.isDebugEnabled()) {
            LOG.debug("Going to execute : " + importStatement2);
        }
        psImport2.execute();
        psImport2.close();
        if (LOG.isDebugEnabled()) {
            LOG.debug("successfully completed " + importStatement2);
        }

        s.execute("ALTER TABLE APP.TAB_COL_STATS ADD COLUMN CAT_NAME VARCHAR(256)");
        s.execute("update APP.TAB_COL_STATS set CAT_NAME = '" + Warehouse.DEFAULT_CATALOG_NAME + "'");

        s.close();

        conn.close();

    } catch (Exception e) {
        throw new RuntimeException("error while loading tpcds metastore dump", e);
    }
}

From source file:org.apache.hadoop.hive.ql.QTestUtil.java

public static void setupMetaStoreTableColumnStatsFor30TBTPCDSWorkload(HiveConf conf) {
    Connection conn = null;
    ArrayList<Statement> statements = new ArrayList<Statement>(); // list of Statements, PreparedStatements

    try {
        Properties props = new Properties(); // connection properties
        props.put("user", conf.get("javax.jdo.option.ConnectionUserName"));
        props.put("password", conf.get("javax.jdo.option.ConnectionPassword"));
        conn = DriverManager.getConnection(conf.get("javax.jdo.option.ConnectionURL"), props);
        ResultSet rs = null;
        Statement s = conn.createStatement();

        if (LOG.isDebugEnabled()) {
            LOG.debug("Connected to metastore database ");
        }

        String mdbPath = AbstractCliConfig.HIVE_ROOT + "/data/files/tpcds-perf/metastore_export/";

        // Setup the table column stats
        BufferedReader br = new BufferedReader(new FileReader(new File(
                AbstractCliConfig.HIVE_ROOT + "/metastore/scripts/upgrade/derby/022-HIVE-11107.derby.sql")));
        String command;

        s.execute("DROP TABLE APP.TABLE_PARAMS");
        s.execute("DROP TABLE APP.TAB_COL_STATS");
        // Create the column stats table
        while ((command = br.readLine()) != null) {
            if (!command.endsWith(";")) {
                continue;
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("Going to run command : " + command);
            }
            try {
                PreparedStatement psCommand = conn.prepareStatement(command.substring(0, command.length() - 1));
                statements.add(psCommand);
                psCommand.execute();
                if (LOG.isDebugEnabled()) {
                    LOG.debug("successfully completed " + command);
                }
            } catch (SQLException e) {
                LOG.info("Got SQL Exception " + e.getMessage());
            }
        }
        br.close();

        java.nio.file.Path tabColStatsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TAB_COL_STATS.txt.bz2");
        java.nio.file.Path tabParamsCsv = FileSystems.getDefault().getPath(mdbPath, "csv",
                "TABLE_PARAMS.txt.bz2");

        // Set up the foreign key constraints properly in the TAB_COL_STATS data
        String tmpBaseDir = System.getProperty(TEST_TMP_DIR_PROPERTY);
        java.nio.file.Path tmpFileLoc1 = FileSystems.getDefault().getPath(tmpBaseDir, "TAB_COL_STATS.txt");
        java.nio.file.Path tmpFileLoc2 = FileSystems.getDefault().getPath(tmpBaseDir, "TABLE_PARAMS.txt");

        class MyComp implements Comparator<String> {
            @Override
            public int compare(String str1, String str2) {
                if (str2.length() != str1.length()) {
                    return str2.length() - str1.length();
                }
                return str1.compareTo(str2);
            }
        }

        final SortedMap<String, Integer> tableNameToID = new TreeMap<String, Integer>(new MyComp());

        rs = s.executeQuery("SELECT * FROM APP.TBLS");
        while (rs.next()) {
            String tblName = rs.getString("TBL_NAME");
            Integer tblId = rs.getInt("TBL_ID");
            tableNameToID.put(tblName, tblId);

            if (LOG.isDebugEnabled()) {
                LOG.debug("Resultset : " + tblName + " | " + tblId);
            }
        }

        final Map<String, Map<String, String>> data = new HashMap<>();
        rs = s.executeQuery("select TBLS.TBL_NAME, a.COLUMN_NAME, a.TYPE_NAME from  "
                + "(select COLUMN_NAME, TYPE_NAME, SDS.SD_ID from APP.COLUMNS_V2 join APP.SDS on SDS.CD_ID = COLUMNS_V2.CD_ID) a"
                + " join APP.TBLS on  TBLS.SD_ID = a.SD_ID");
        while (rs.next()) {
            String tblName = rs.getString(1);
            String colName = rs.getString(2);
            String typeName = rs.getString(3);
            Map<String, String> cols = data.get(tblName);
            if (null == cols) {
                cols = new HashMap<>();
            }
            cols.put(colName, typeName);
            data.put(tblName, cols);
        }

        BufferedReader reader = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabColStatsCsv, StandardOpenOption.READ))));

        Stream<String> replaced = reader.lines().parallel().map(str -> {
            String[] splits = str.split(",");
            String tblName = splits[0];
            String colName = splits[1];
            Integer tblID = tableNameToID.get(tblName);
            StringBuilder sb = new StringBuilder(
                    "default@" + tblName + "@" + colName + "@" + data.get(tblName).get(colName) + "@");
            for (int i = 2; i < splits.length; i++) {
                sb.append(splits[i] + "@");
            }
            // Add tbl_id and empty bitvector
            return sb.append(tblID).append("@").toString();
        });

        Files.write(tmpFileLoc1, (Iterable<String>) replaced::iterator);
        replaced.close();
        reader.close();

        BufferedReader reader2 = new BufferedReader(new InputStreamReader(
                new BZip2CompressorInputStream(Files.newInputStream(tabParamsCsv, StandardOpenOption.READ))));
        final Map<String, String> colStats = new ConcurrentHashMap<>();
        Stream<String> replacedStream = reader2.lines().parallel().map(str -> {
            String[] splits = str.split("_@");
            String tblName = splits[0];
            Integer tblId = tableNameToID.get(tblName);
            Map<String, String> cols = data.get(tblName);
            StringBuilder sb = new StringBuilder();
            sb.append("{\"COLUMN_STATS\":{");
            for (String colName : cols.keySet()) {
                sb.append("\"" + colName + "\":\"true\",");
            }
            sb.append("},\"BASIC_STATS\":\"true\"}");
            colStats.put(tblId.toString(), sb.toString());

            return tblId.toString() + "@" + splits[1];
        });

        Files.write(tmpFileLoc2, (Iterable<String>) replacedStream::iterator);
        Files.write(tmpFileLoc2,
                (Iterable<String>) colStats.entrySet().stream()
                        .map(map -> map.getKey() + "@COLUMN_STATS_ACCURATE@" + map.getValue())::iterator,
                StandardOpenOption.APPEND);

        replacedStream.close();
        reader2.close();
        // Load the column stats and table params with 30 TB scale
        String importStatement1 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TAB_COL_STATS" + "', '"
                + tmpFileLoc1.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";
        String importStatement2 = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE(null, '" + "TABLE_PARAMS" + "', '"
                + tmpFileLoc2.toAbsolutePath().toString() + "', '@', null, 'UTF-8', 1)";
        try {
            PreparedStatement psImport1 = conn.prepareStatement(importStatement1);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Going to execute : " + importStatement1);
            }
            statements.add(psImport1);
            psImport1.execute();
            if (LOG.isDebugEnabled()) {
                LOG.debug("successfully completed " + importStatement1);
            }
            PreparedStatement psImport2 = conn.prepareStatement(importStatement2);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Going to execute : " + importStatement2);
            }
            statements.add(psImport2);
            psImport2.execute();
            if (LOG.isDebugEnabled()) {
                LOG.debug("successfully completed " + importStatement2);
            }
        } catch (SQLException e) {
            LOG.info("Got SQL Exception  " + e.getMessage());
        }
    } catch (FileNotFoundException e1) {
        LOG.info("Got File not found Exception " + e1.getMessage());
    } catch (IOException e1) {
        LOG.info("Got IOException " + e1.getMessage());
    } catch (SQLException e1) {
        LOG.info("Got SQLException " + e1.getMessage());
    } finally {
        // Statements and PreparedStatements
        int i = 0;
        while (!statements.isEmpty()) {
            // PreparedStatement extend Statement
            Statement st = statements.remove(i);
            try {
                if (st != null) {
                    st.close();
                    st = null;
                }
            } catch (SQLException sqle) {
            }
        }

        //Connection
        try {
            if (conn != null) {
                conn.close();
                conn = null;
            }
        } catch (SQLException sqle) {
        }
    }
}

From source file:org.apache.tajo.cli.tsql.TajoCli.java

private Collection<String> getKeywords() {
    // SQL reserved keywords
    Stream<String> tokens = Arrays.stream(SQLLexer.tokenNames);
    Stream<String> rules = Arrays.stream(SQLLexer.ruleNames);

    List<String> keywords = Stream.concat(tokens, rules)
            .filter((str) -> str.matches("[A-Z_]+") && str.length() > 1).distinct().map(String::toLowerCase)
            .collect(Collectors.toList());

    // DB and table names
    for (String db : client.getAllDatabaseNames()) {
        keywords.add(db);
        keywords.addAll(client.getTableList(db));
    }

    tokens.close();
    rules.close();

    return keywords;
}
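
Since the token and rule streams are array-backed, their close handlers are no-ops here and the explicit close() calls are defensive. Per the Stream.concat documentation, closing the concatenated stream invokes the close handlers of both inputs, so the same cleanup can be written as one try-with-resources (a sketch):

List<String> keywords;
try (Stream<String> combined = Stream.concat(
        Arrays.stream(SQLLexer.tokenNames), Arrays.stream(SQLLexer.ruleNames))) {
    keywords = combined.filter(str -> str.matches("[A-Z_]+") && str.length() > 1)
            .distinct().map(String::toLowerCase)
            .collect(Collectors.toList());
} // closes both input streams via their registered close handlers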

From source file:org.g_node.mergers.LktMergerJenaTest.java

@Test
public void testPlainMergeAndSave() throws Exception {
    final String useCase = "lkt";
    final Path outputFile = this.testFileFolder.resolve("out.ttl");

    final String[] cliArgs = new String[7];
    cliArgs[0] = useCase;
    cliArgs[1] = "-m";
    cliArgs[2] = this.testMainRdfFile.getAbsolutePath();
    cliArgs[3] = "-i";
    cliArgs[4] = this.testMergeRdfFile.getAbsolutePath();
    cliArgs[5] = "-o";
    cliArgs[6] = outputFile.toString();

    App.main(cliArgs);
    assertThat(Files.exists(outputFile)).isTrue();

    final Stream<String> fileStream = Files.lines(outputFile);
    final List<String> readFile = fileStream.collect(Collectors.toList());
    assertThat(readFile.size()).isEqualTo(5);
    fileStream.close();
}