Example usage for org.apache.commons.io IOUtils readLines

List of usage examples for org.apache.commons.io IOUtils readLines

Introduction

In this page you can find the example usage for org.apache.commons.io IOUtils readLines.

Prototype

public static List&lt;String&gt; readLines(Reader input) throws IOException

Source Link

Document

Get the contents of a Reader as a list of Strings, one entry per line.

Usage

From source file:org.apache.james.mime4j.message.MessageTest.java

/**
 * Verifies that a header field added via {@code Header.addField} is present
 * both in the parsed message and in the output of {@code writeTo}.
 */
public void testAddHeaderWriteTo() throws Exception {
    String headerName = "testheader";
    String headerValue = "testvalue";
    String testheader = headerName + ": " + headerValue;

    byte[] inputByte = getRawMessageAsByteArray();

    // Parse the raw message and append the extra header field.
    Message m = new Message(new ByteArrayInputStream(inputByte));
    m.getHeader().addField(AbstractField.parse(testheader));

    // assertEquals takes (message, expected, actual) -- expected value first.
    assertEquals("header added", headerValue, m.getHeader().getField(headerName).getBody());

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    m.writeTo(out);
    // Close the reader once read, and use an explicit charset so the test does
    // not depend on the platform default encoding.
    List<?> lines;
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
            new ByteArrayInputStream(out.toByteArray()), java.nio.charset.StandardCharsets.UTF_8))) {
        lines = IOUtils.readLines(reader);
    }

    assertTrue("header added", lines.contains(testheader));
}

From source file:org.apache.kylin.storage.hbase.cube.v2.filter.MassInValueProviderImpl.java

/**
 * Loads the mass-in filter values. For {@code HDFS} filter tables the filter
 * file is read line by line; each non-empty line is encoded with the given
 * dimension encoding and collected into {@code ret}. Results are cached per
 * resource path, keyed on the file's modification time, so unchanged files
 * are not re-read.
 *
 * @param filterTableType          only the HDFS filter table type is supported
 * @param filterResourceIdentifier HDFS path of the filter table file
 * @param encoding                 dimension encoding applied to each line
 */
public MassInValueProviderImpl(Functions.FilterTableType filterTableType, String filterResourceIdentifier,
        DimensionEncoding encoding) {

    if (filterTableType == Functions.FilterTableType.HDFS) {

        logger.info("Start to load HDFS filter table from " + filterResourceIdentifier);
        Stopwatch stopwatch = new Stopwatch().start();

        try {
            synchronized (hdfs_caches) {

                // directly create hbase configuration here due to no KYLIN_CONF definition.
                FileSystem fileSystem = FileSystem.get(HBaseConfiguration.create());

                // Reuse the cached value set if the file is unchanged since the
                // last load.
                long modificationTime = fileSystem.getFileStatus(new Path(filterResourceIdentifier))
                        .getModificationTime();
                Pair<Long, Set<ByteArray>> cached = hdfs_caches.getIfPresent(filterResourceIdentifier);
                if (cached != null && cached.getFirst().equals(modificationTime)) {
                    ret = cached.getSecond();
                    logger.info("Load HDFS from cache using " + stopwatch.elapsedMillis() + " millis");
                    return;
                }

                // Close the HDFS stream once the lines are read; it was
                // previously leaked.
                List<String> lines;
                try (InputStream inputStream = fileSystem.open(new Path(filterResourceIdentifier))) {
                    lines = IOUtils.readLines(inputStream);
                }

                logger.info("Load HDFS finished after " + stopwatch.elapsedMillis() + " millis");

                for (String line : lines) {
                    if (StringUtils.isEmpty(line)) {
                        continue;
                    }

                    try {
                        // Encode the line bytes once instead of calling
                        // getBytes() twice.
                        byte[] lineBytes = line.getBytes();
                        ByteArray byteArray = ByteArray.allocate(encoding.getLengthOfEncoding());
                        encoding.encode(lineBytes, lineBytes.length, byteArray.array(), 0);
                        ret.add(byteArray);
                    } catch (Exception e) {
                        // Keep the cause so encoding failures are diagnosable.
                        logger.warn("Error when encoding the filter line " + line, e);
                    }
                }

                hdfs_caches.put(filterResourceIdentifier, Pair.newPair(modificationTime, ret));

                logger.info("Mass In values constructed after " + stopwatch.elapsedMillis()
                        + " millis, containing " + ret.size() + " entries");
            }

        } catch (IOException e) {
            throw new RuntimeException("error when loading the mass in values", e);
        }
    } else {
        throw new RuntimeException("HBASE_TABLE FilterTableType Not supported yet");
    }
}

From source file:org.apache.marmotta.kiwi.loader.pgsql.KiWiPostgresHandler.java

/**
 * Drops the database indexes by running the bundled {@code drop_indexes.sql}
 * script, with SQL comment lines ({@code --}) stripped out.
 *
 * @throws SQLException if the script cannot be read or executed
 */
@Override
protected void dropIndexes() throws SQLException {
    try {
        ScriptRunner runner = new ScriptRunner(connection.getJDBCConnection(), false, false);

        // Concatenate the non-comment lines of the script; close the classpath
        // stream when done (it was previously leaked).
        StringBuilder script = new StringBuilder();
        try (java.io.InputStream in = KiWiPostgresHandler.class.getResourceAsStream("drop_indexes.sql")) {
            for (String line : IOUtils.readLines(in)) {
                if (!line.startsWith("--")) {
                    script.append(line);
                    script.append(" ");
                }
            }
        }
        log.debug("PostgreSQL: running SQL script '{}'", script.toString());
        runner.runScript(new StringReader(script.toString()));
    } catch (IOException ex) {
        throw new SQLException("error while dropping indexes", ex);
    }
}

From source file:org.apache.marmotta.kiwi.loader.pgsql.KiWiPostgresHandler.java

/**
 * Re-creates the database indexes by running the bundled
 * {@code create_indexes.sql} script, with SQL comment lines ({@code --})
 * stripped out.
 *
 * @throws SQLException if the script cannot be read or executed
 */
@Override
protected void createIndexes() throws SQLException {
    try {
        ScriptRunner runner = new ScriptRunner(connection.getJDBCConnection(), false, false);

        // Concatenate the non-comment lines of the script; close the classpath
        // stream when done (it was previously leaked).
        StringBuilder script = new StringBuilder();
        try (java.io.InputStream in = KiWiPostgresHandler.class.getResourceAsStream("create_indexes.sql")) {
            for (String line : IOUtils.readLines(in)) {
                if (!line.startsWith("--")) {
                    script.append(line);
                    script.append(" ");
                }
            }
        }
        log.debug("PostgreSQL: running SQL script '{}'", script.toString());
        runner.runScript(new StringReader(script.toString()));
    } catch (IOException ex) {
        throw new SQLException("error while creating indexes", ex);
    }
}

From source file:org.apache.marmotta.kiwi.sparql.sail.KiWiSparqlSail.java

/**
 * Prepares fulltext search support for the configured dialect. For PostgreSQL
 * this creates the ISO-language lookup function, one fulltext index per
 * configured language, and a generic fulltext index, each guarded by a
 * metadata flag so it is only created once. Errors are logged, not rethrown.
 *
 * @param configuration the KiWi configuration (fulltext flag, languages, dialect)
 */
private void prepareFulltext(KiWiConfiguration configuration) {
    try {
        if (configuration.isFulltextEnabled()) {
            KiWiConnection connection = parent.getPersistence().getConnection();
            try {
                if (configuration.getDialect() instanceof PostgreSQLDialect) {

                    // for postgres, we need to create
                    // - a stored procedure for mapping ISO language codes to PostgreSQL fulltext configuration names
                    // - if languages are not null, for each configured language as well as for the generic configuration
                    //   an index over nodes.svalue

                    ScriptRunner runner = new ScriptRunner(connection.getJDBCConnection(), false, false);
                    if (connection.getMetadata("ft.lookup") == null) {
                        log.info("PostgreSQL: creating language configuration lookup function");
                        String script = loadSqlScript("create_fulltext_langlookup.sql");
                        log.debug("PostgreSQL: running SQL script '{}'", script);
                        runner.runScript(new StringReader(script));
                    }

                    // language specific indexes: the script is a template with
                    // @LANGUAGE@/@CONFIGURATION@ placeholders substituted per language
                    if (configuration.getFulltextLanguages() != null) {
                        String script = loadSqlScript("create_fulltext_index.sql");
                        for (String lang : configuration.getFulltextLanguages()) {
                            if (connection.getMetadata("ft.idx." + lang) == null) {
                                String pg_configuration = POSTGRES_LANG_MAPPINGS.get(lang);
                                if (pg_configuration != null) {
                                    log.info("PostgreSQL: creating fulltext index for language {}", lang);
                                    String script_lang = script.replaceAll("@LANGUAGE@", lang)
                                            .replaceAll("@CONFIGURATION@", pg_configuration);
                                    log.debug("PostgreSQL: running SQL script '{}'", script_lang);
                                    runner.runScript(new StringReader(script_lang));
                                }
                            }
                        }
                    }

                    // generic index
                    if (configuration.getFulltextLanguages() != null) {
                        if (connection.getMetadata("ft.idx.generic") == null) {
                            String script = loadSqlScript("create_fulltext_index_generic.sql");
                            log.info("PostgreSQL: creating generic fulltext index ");
                            log.debug("PostgreSQL: running SQL script '{}'", script);
                            runner.runScript(new StringReader(script));
                        }
                    }

                    /*
                    } else if(configuration.getDialect() instanceof MySQLDialect) {
                            
                    // for MySQL, just create a fulltext index (no language support)
                    if(connection.getMetadata("ft.idx") == null) {
                    ScriptRunner runner = new ScriptRunner(connection.getJDBCConnection(), false, false);
                    String script = IOUtils.toString(MySQLDialect.class.getResourceAsStream("create_fulltext_index.sql"));
                    log.info("MySQL: creating generic fulltext index ");
                    log.debug("MySQL: running SQL script '{}'", script.toString());
                    runner.runScript(new StringReader(script));
                    }
                    /*
                    } else if(configuration.getDialect() instanceof H2Dialect) {
                            
                    // for H2, just create a fulltext index (no language support)
                    if(connection.getMetadata("fulltext.index") == null) {
                    ScriptRunner runner = new ScriptRunner(connection.getJDBCConnection(), false, false);
                    String script = IOUtils.toString(H2Dialect.class.getResourceAsStream("create_fulltext_index.sql"));
                    runner.runScript(new StringReader(script));
                    }
                    */
                }
            } finally {
                connection.close();
            }
        }
    } catch (IOException | SQLException ex) {
        log.error("error while preparing fulltext support", ex);
    }
}

/**
 * Reads a SQL script from the {@link PostgreSQLDialect} classpath resources,
 * dropping {@code --} comment lines and joining the remaining lines with
 * spaces. The resource stream is closed (it was previously leaked).
 *
 * @param resourceName name of the SQL resource next to PostgreSQLDialect
 * @return the flattened script text
 * @throws IOException if the resource cannot be read
 */
private static String loadSqlScript(String resourceName) throws IOException {
    StringBuilder script = new StringBuilder();
    try (java.io.InputStream in = PostgreSQLDialect.class.getResourceAsStream(resourceName)) {
        for (String line : IOUtils.readLines(in)) {
            if (!line.startsWith("--")) {
                script.append(line).append(" ");
            }
        }
    }
    return script.toString();
}

From source file:org.apache.metron.dataloads.extractor.stix.StixExtractorTest.java

/**
 * Loads the STIX fixture documents used by the tests, normalizing their line
 * endings to {@code \n}. Both readers are closed (they were previously
 * leaked).
 */
@Before
public void setup() throws IOException {
    try (FileReader reader = new FileReader(new File("src/test/resources/stix_example.xml"))) {
        stixDoc = Joiner.on("\n").join(IOUtils.readLines(reader));
    }
    try (FileReader reader = new FileReader(new File("src/test/resources/stix_example_wo_conditions.xml"))) {
        stixDocWithoutCondition = Joiner.on("\n").join(IOUtils.readLines(reader));
    }
}

From source file:org.apache.metron.maas.service.runner.Runner.java

/**
 * Container entry point for the MaaS runner: launches the model serving
 * script as a child process, reads the endpoint it publishes, and registers
 * that endpoint with Curator service discovery under the configured ZK root.
 * Blocks until the child process exits.
 *
 * @param argv command line options parsed via {@code RunnerOptions}
 * @throws Exception on any unrecoverable startup failure
 */
public static void main(String... argv) throws Exception {
    // Pull the runtime configuration off the command line.
    CommandLine cli = RunnerOptions.parse(new PosixParser(), argv);
    String zkQuorum = RunnerOptions.ZK_QUORUM.get(cli);
    String zkRoot = RunnerOptions.ZK_ROOT.get(cli);
    String script = RunnerOptions.SCRIPT.get(cli);
    String name = RunnerOptions.NAME.get(cli);
    String version = RunnerOptions.VERSION.get(cli);
    String containerId = RunnerOptions.CONTAINER_ID.get(cli);
    String hostname = RunnerOptions.HOSTNAME.get(cli);
    CuratorFramework client = null;

    // NOTE(review): informational startup message logged at ERROR level --
    // looks like it should be LOG.info.
    LOG.error("Running script " + script);
    LOG.info("Local Directory Contents");
    for (File f : new File(".").listFiles()) {
        LOG.info("  " + f.getName());
    }
    try {
        // Connect to ZooKeeper with exponential backoff retries.
        RetryPolicy retryPolicy = new ExponentialBackoffRetry(1000, 3);
        client = CuratorFrameworkFactory.newClient(zkQuorum, retryPolicy);
        client.start();
        MaaSConfig config = ConfigUtil.INSTANCE.read(client, zkRoot, new MaaSConfig(), MaaSConfig.class);
        JsonInstanceSerializer<ModelEndpoint> serializer = new JsonInstanceSerializer<>(ModelEndpoint.class);
        // NOTE(review): the finally block below is empty; this try/finally
        // adds nothing and could be removed.
        try {
            serviceDiscovery = ServiceDiscoveryBuilder.builder(ModelEndpoint.class).client(client)
                    .basePath(config.getServiceRoot()).serializer(serializer).build();
        } finally {
        }
        LOG.info("Created service @ " + config.getServiceRoot());

        serviceDiscovery.start();

        // Launch the model serving script from its own directory.
        File cwd = new File(script).getParentFile();
        final String cmd = new File(cwd, script).getAbsolutePath();
        try {
            p = new ProcessBuilder(cmd).directory(cwd).start();

        } catch (Exception e) {
            LOG.info("Unable to execute " + cmd + " from " + new File(".").getAbsolutePath());
            LOG.error(e.getMessage(), e);
            throw new IllegalStateException(e.getMessage(), e);
        }

        try {
            LOG.info("Started " + cmd);
            // readEndpoint picks up the endpoint the script wrote into cwd;
            // correctLocalUrl rewrites it to the externally visible hostname.
            Endpoint ep = readEndpoint(cwd);
            URL endpointUrl = correctLocalUrl(hostname, ep.getUrl());
            ep.setUrl(endpointUrl.toString());
            LOG.info("Read endpoint " + ep);
            ModelEndpoint endpoint = new ModelEndpoint();
            {
                endpoint.setName(name);
                endpoint.setContainerId(containerId);
                endpoint.setEndpoint(ep);
                endpoint.setVersion(version);
            }
            ; // NOTE(review): stray empty statement; harmless.
            ServiceInstanceBuilder<ModelEndpoint> builder = ServiceInstance.<ModelEndpoint>builder()
                    .address(endpointUrl.getHost()).id(containerId).name(name).port(endpointUrl.getPort())
                    .registrationTimeUTC(System.currentTimeMillis()).serviceType(ServiceType.STATIC)
                    .payload(endpoint);
            final ServiceInstance<ModelEndpoint> instance = builder.build();
            try {
                LOG.info("Installing service instance: " + instance + " at " + serviceDiscovery);
                serviceDiscovery.registerService(instance);
                LOG.info("Installed instance " + name + ":" + version + "@" + endpointUrl);
            } catch (Throwable t) {
                // Registration failure is logged but not fatal; the child
                // process keeps running without being discoverable.
                LOG.error("Unable to install instance " + name + ":" + version + "@" + endpointUrl, t);
            }

            // Ensure the child process dies together with this JVM.
            Runtime.getRuntime().addShutdownHook(new Thread() {
                @Override
                public void run() {
                    LOG.info("KILLING CONTAINER PROCESS...");
                    if (p != null) {
                        LOG.info("Process destroyed forcibly");
                        p.destroyForcibly();
                    }
                }
            });
        } finally {
            // Block until the model process exits; on non-zero exit surface
            // its stderr/stdout in the failure message.
            if (p.waitFor() != 0) {
                String stderr = Joiner.on("\n").join(IOUtils.readLines(p.getErrorStream()));
                String stdout = Joiner.on("\n").join(IOUtils.readLines(p.getInputStream()));
                throw new IllegalStateException(
                        "Unable to execute " + script + ".  Stderr is: " + stderr + "\nStdout is: " + stdout);
            }
        }
    } finally {
        // Tear down discovery and the ZK client regardless of outcome.
        if (serviceDiscovery != null) {
            CloseableUtils.closeQuietly(serviceDiscovery);
        }
        if (client != null) {
            CloseableUtils.closeQuietly(client);
        }
    }
}

From source file:org.apache.nifi.toolkit.tls.standalone.TlsToolkitStandaloneTest.java

/**
 * Checks the artifacts generated for a client DN: the one-line password file,
 * the PKCS12 keystore it unlocks, and a two-element certificate chain whose
 * entries both verify against the given root certificate and whose leaf key
 * pair matches the stored private key.
 */
private void checkClientCert(String clientDn, X509Certificate rootCert) throws Exception {
    String clientDnFile = TlsToolkitStandalone.getClientDnFile(CertificateUtils.reorderDn(clientDn));

    // The password file must contain exactly one line: the keystore password.
    String password;
    try (FileReader passwordReader = new FileReader(new File(tempDir, clientDnFile + ".password"))) {
        List<String> passwordLines = IOUtils.readLines(passwordReader);
        assertEquals(1, passwordLines.size());
        password = passwordLines.get(0);
    }

    // Load the PKCS12 keystore produced for this client.
    KeyStore keyStore = KeyStoreUtils.getKeyStore(KeystoreType.PKCS12.toString());
    try (FileInputStream keyStoreStream = new FileInputStream(new File(tempDir, clientDnFile + ".p12"))) {
        keyStore.load(keyStoreStream, password.toCharArray());
    }

    PrivateKey privateKey = (PrivateKey) keyStore.getKey(TlsToolkitStandalone.NIFI_KEY, new char[0]);
    Certificate[] chain = keyStore.getCertificateChain(TlsToolkitStandalone.NIFI_KEY);
    assertEquals(2, chain.length);
    assertEquals(rootCert, chain[1]);

    // Both the CA cert and the client cert must be signed by the root key.
    chain[1].verify(rootCert.getPublicKey());
    chain[0].verify(rootCert.getPublicKey());
    TlsCertificateAuthorityTest.assertPrivateAndPublicKeyMatch(privateKey, chain[0].getPublicKey());
}

From source file:org.apache.pig.tools.DownloadResolver.java

/**
 * Locates the ivysettings configuration file and publishes its path via the
 * {@code grape.config} system property. Search order: an already-set
 * {@code grape.config} property, then PIG_CONF_DIR, then PIG_HOME/conf, then
 * the context classloader (copying the resource to a temp file when it lives
 * inside a jar).
 */
private DownloadResolver() {
    if (System.getProperty("grape.config") != null) {
        LOG.info("Using ivysettings file from " + System.getProperty("grape.config"));
    } else {
        // Retrieve the ivysettings configuration file
        Map<String, String> envMap = System.getenv();
        File confFile = null;
        // Check for configuration file in PIG_CONF_DIR
        if (envMap.containsKey("PIG_CONF_DIR")) {
            confFile = new File(new File(envMap.get("PIG_CONF_DIR")).getPath(), IVY_FILE_NAME);
        }

        // Check for configuration file in PIG_HOME if not found in PIG_CONF_DIR
        if (confFile == null || !confFile.exists()) {
            confFile = new File(new File(envMap.get("PIG_HOME"), "conf").getPath(), IVY_FILE_NAME);
        }

        // Check for configuration file in Classloader if not found in PIG_CONF_DIR and PIG_HOME
        if (confFile == null || !confFile.exists()) {
            ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
            if (classLoader.getResource(IVY_FILE_NAME) != null) {
                LOG.info("Found ivysettings file in classpath");
                confFile = new File(classLoader.getResource(IVY_FILE_NAME).getFile());

                if (!confFile.exists()) {
                    // ivysettings file resides inside a jar: copy it out to a
                    // temp file. Close the resource stream when done.
                    try (java.io.InputStream ivyStream = classLoader.getResourceAsStream(IVY_FILE_NAME)) {
                        List<String> ivyLines = IOUtils.readLines(ivyStream);
                        confFile = File.createTempFile("ivysettings", ".xml");
                        confFile.deleteOnExit();
                        for (String str : ivyLines) {
                            // readLines strips line terminators; re-append one so
                            // the copied XML keeps its line structure instead of
                            // all lines being concatenated together.
                            FileUtils.writeStringToFile(confFile, str + "\n", true);
                        }
                    } catch (Exception e) {
                        LOG.warn("Could not create an ivysettings file from resource", e);
                    }
                }
            }
        }

        // Set the Configuration file
        if (confFile != null && confFile.exists()) {
            LOG.info("Using ivysettings file from " + confFile.toString());
            System.setProperty("grape.config", confFile.toString());
        } else {
            LOG.warn("Could not find custom ivysettings file in PIG_CONF_DIR or PIG_HOME or classpath.");
        }
    }
}

From source file:org.apache.sling.ide.eclipse.ui.internal.ImportRepositoryContentAction.java

/**
 * Reads the {@code .vltignore} file in the given folder, if present, and
 * registers each of its lines as a regexp ignore rule for the given
 * repository path.
 *
 * @param folder the project folder to scan for a .vltignore file
 * @param path   the repository path the ignore rules apply to
 * @throws IOException   if the ignore file cannot be read
 * @throws CoreException if the file contents cannot be accessed
 */
private void parseIgnoreFiles(IFolder folder, String path) throws IOException, CoreException {
    // TODO - the parsing should be extracted
    IResource vltIgnore = folder.findMember(".vltignore");
    // instanceof is false for null, so no separate null check is needed.
    if (vltIgnore instanceof IFile) {

        logger.trace("Found ignore file at {0}", vltIgnore.getFullPath());

        try (InputStream contents = ((IFile) vltIgnore).getContents()) {
            List<String> ignoreLines = IOUtils.readLines(contents);
            for (String ignoreLine : ignoreLines) {
                logger.trace("Registering ignore rule {0}:{1}", path, ignoreLine);
                ignoredResources.registerRegExpIgnoreRule(path, ignoreLine);
            }
        }
    }
}