Example usage for org.apache.commons.io FileUtils writeLines

List of usage examples for org.apache.commons.io FileUtils writeLines

Introduction

On this page you can find example usages of org.apache.commons.io FileUtils.writeLines, drawn from open-source projects.

Prototype

public static void writeLines(File file, Collection<?> lines) throws IOException 

Document

Writes the toString() value of each item in a collection to the specified File line by line. The default VM encoding and the default line ending are used.
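
Example

Before the project usages below, here is a minimal, self-contained sketch of the call. The class name, temporary-file prefix, and sample lines are illustrative only and not taken from any of the projects on this page.

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.commons.io.FileUtils;

public class WriteLinesSketch {
    public static void main(String[] args) throws IOException {
        // Illustrative target file; any writable File works here.
        File out = File.createTempFile("writeLines-demo", ".txt");
        out.deleteOnExit();

        List<String> lines = Arrays.asList("first line", "second line", "third line");

        // Each item's toString() value is written on its own line, using the
        // platform default charset and line ending.
        FileUtils.writeLines(out, lines);

        // Read the file back to confirm the three lines were written.
        for (String line : FileUtils.readLines(out)) {
            System.out.println(line);
        }
    }
}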

Usage

From source file: org.apache.druid.indexer.BatchDeltaIngestionTest.java

@Test
public void testDeltaIngestion() throws Exception {
    File tmpDir = temporaryFolder.newFolder();

    File dataFile1 = new File(tmpDir, "data1");
    FileUtils.writeLines(dataFile1, ImmutableList.of("2014102200,a.example.com,a.example.com,90",
            "2014102201,b.example.com,b.example.com,25"));

    File dataFile2 = new File(tmpDir, "data2");
    FileUtils.writeLines(dataFile2, ImmutableList.of("2014102202,c.example.com,c.example.com,70"));

    // Using a hadoop glob path to test that it continues to work with hadoop MultipleInputs
    // usage and is not affected by
    // https://issues.apache.org/jira/browse/MAPREDUCE-5061
    String inputPath = tmpDir.getPath() + "/{data1,data2}";

    List<WindowedDataSegment> segments = ImmutableList.of(new WindowedDataSegment(SEGMENT, INTERVAL_FULL));

    HadoopDruidIndexerConfig config = makeHadoopDruidIndexerConfig(ImmutableMap.of("type", "multi", "children",
            ImmutableList.of(ImmutableMap.of("type", "dataSource", "ingestionSpec",
                    ImmutableMap.of("dataSource", "testds", "interval", INTERVAL_FULL), "segments", segments),
                    ImmutableMap.<String, Object>of("type", "static", "paths", inputPath))),
            temporaryFolder.newFolder());

    List<ImmutableMap<String, Object>> expectedRows = ImmutableList.of(
            ImmutableMap.of("time", DateTimes.of("2014-10-22T00:00:00.000Z"), "host",
                    ImmutableList.of("a.example.com"), "visited_sum", 190L, "unique_hosts", 1.0d),
            ImmutableMap.of("time", DateTimes.of("2014-10-22T01:00:00.000Z"), "host",
                    ImmutableList.of("b.example.com"), "visited_sum", 175L, "unique_hosts", 1.0d),
            ImmutableMap.of("time", DateTimes.of("2014-10-22T02:00:00.000Z"), "host",
                    ImmutableList.of("c.example.com"), "visited_sum", 270L, "unique_hosts", 1.0d));

    testIngestion(config, expectedRows, Iterables.getOnlyElement(segments), ImmutableList.of("host"),
            ImmutableList.of("visited_sum", "unique_hosts"));
}

From source file: org.apache.druid.indexer.DeterminePartitionsJobTest.java

public DeterminePartitionsJobTest(boolean assumeGrouped, Long targetPartitionSize, String interval,
        int expectedNumOfSegments, int[] expectedNumOfShardsForEachSegment,
        String[][][] expectedStartEndForEachShard, List<String> data) throws IOException {
    this.expectedNumOfSegments = expectedNumOfSegments;
    this.expectedNumOfShardsForEachSegment = expectedNumOfShardsForEachSegment;
    this.expectedStartEndForEachShard = expectedStartEndForEachShard;

    dataFile = File.createTempFile("test_website_data", "tmp");
    dataFile.deleteOnExit();
    tmpDir = Files.createTempDir();
    tmpDir.deleteOnExit();

    FileUtils.writeLines(dataFile, data);

    config = new HadoopDruidIndexerConfig(new HadoopIngestionSpec(
            new DataSchema("website",
                    HadoopDruidIndexerConfig.JSON_MAPPER.convertValue(
                            new StringInputRowParser(
                                    new CSVParseSpec(new TimestampSpec("timestamp", "yyyyMMddHH", null),
                                            new DimensionsSpec(DimensionsSpec.getDefaultSchemas(
                                                    ImmutableList.of("host", "country")), null, null),
                                            null,
                                            ImmutableList.of("timestamp", "host", "country", "visited_num"),
                                            false, 0),
                                    null),
                            Map.class),
                    new AggregatorFactory[] { new LongSumAggregatorFactory("visited_num", "visited_num") },
                    new UniformGranularitySpec(Granularities.DAY, Granularities.NONE,
                            ImmutableList.of(Intervals.of(interval))),
                    null, HadoopDruidIndexerConfig.JSON_MAPPER),
            new HadoopIOConfig(ImmutableMap.of("paths", dataFile.getCanonicalPath(), "type", "static"), null,
                    tmpDir.getCanonicalPath()),
            new HadoopTuningConfig(tmpDir.getCanonicalPath(), null,
                    new SingleDimensionPartitionsSpec(null, targetPartitionSize, null, assumeGrouped), null,
                    null, null, null, false, false, false, false, null, false, false, null, null, null, false,
                    false, null, null, null)));
}

From source file: org.apache.druid.indexer.IndexGeneratorJobTest.java

@Before
public void setUp() throws Exception {
    mapper = HadoopDruidIndexerConfig.JSON_MAPPER;
    mapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
    mapper.registerSubtypes(new NamedType(SingleDimensionShardSpec.class, "single"));

    dataFile = temporaryFolder.newFile();
    tmpDir = temporaryFolder.newFolder();

    HashMap<String, Object> inputSpec = new HashMap<String, Object>();
    inputSpec.put("paths", dataFile.getCanonicalPath());
    inputSpec.put("type", "static");
    if (inputFormatName != null) {
        inputSpec.put("inputFormat", inputFormatName);
    }

    if (SequenceFileInputFormat.class.getName().equals(inputFormatName)) {
        writeDataToLocalSequenceFile(dataFile, data);
    } else {
        FileUtils.writeLines(dataFile, data);
    }

    config = new HadoopDruidIndexerConfig(new HadoopIngestionSpec(
            new DataSchema(datasourceName, mapper.convertValue(inputRowParser, Map.class), aggs,
                    new UniformGranularitySpec(Granularities.DAY, Granularities.NONE,
                            ImmutableList.of(this.interval)),
                    null, mapper),
            new HadoopIOConfig(ImmutableMap.copyOf(inputSpec), null, tmpDir.getCanonicalPath()),
            new HadoopTuningConfig(tmpDir.getCanonicalPath(), null, null, null, null, maxRowsInMemory,
                    maxBytesInMemory, false, false, false, false, ImmutableMap.of(JobContext.NUM_REDUCES, "0"), //verifies that set num reducers is ignored
                    false, useCombiner, null, true, null, forceExtendableShardSpecs, false, null, null, null)));

    config.setShardSpecs(loadShardSpecs(partitionType, shardInfoForEachSegment));
    config = HadoopDruidIndexerConfig.fromSpec(config.getSchema());
}

From source file: org.apache.gobblin.service.modules.core.MultiHopsFlowToJobSpecCompilerTest.java

@BeforeClass
public void setUp() throws Exception {
    // Create dir for template catalog
    FileUtils.forceMkdir(new File(TEST_TEMPLATE_CATALOG_PATH));

    // Create template to use in test
    List<String> templateEntries = new ArrayList<>();
    templateEntries.add("testProperty1 = \"testValue1\"");
    templateEntries.add("testProperty2 = \"test.Value1\"");
    templateEntries.add("testProperty3 = 100");
    FileUtils.writeLines(new File(TEST_TEMPLATE_CATALOG_PATH + "/" + TEST_TEMPLATE_NAME), templateEntries);

    // Initialize compiler with template catalog
    Properties compilerWithTemplateCatalogProperties = new Properties();
    compilerWithTemplateCatalogProperties.setProperty(
            ServiceConfigKeys.TEMPLATE_CATALOGS_FULLY_QUALIFIED_PATH_KEY, TEST_TEMPLATE_CATALOG_URI);

    // Initialize compiler with common useful properties
    String testPath = TEST_SOURCE_NAME + "," + TEST_HOP_NAME_A + "," + TEST_HOP_NAME_B + "," + TEST_SINK_NAME;
    compilerWithTemplateCatalogProperties.setProperty(ServiceConfigKeys.POLICY_BASED_DATA_MOVEMENT_PATH,
            testPath);

    this.compilerWithTemplateCalague = new MultiHopsFlowToJobSpecCompiler(
            ConfigUtils.propertiesToConfig(compilerWithTemplateCatalogProperties));

    vertexSource = new BaseServiceNodeImpl(TEST_SOURCE_NAME);
    vertexHopA = new BaseServiceNodeImpl(TEST_HOP_NAME_A);
    vertexHopB = new BaseServiceNodeImpl(TEST_HOP_NAME_B);
    vertexHopC = new BaseServiceNodeImpl(TEST_HOP_NAME_C);
    vertexSink = new BaseServiceNodeImpl(TEST_SINK_NAME);

}

From source file: org.apache.hadoop.util.TestFileBasedIPList.java

public static void createFileWithEntries(String fileName, String[] ips) throws IOException {
    FileUtils.writeLines(new File(fileName), Arrays.asList(ips));
}

From source file: org.apache.hive.service.cli.session.TestSessionGlobalInitFile.java

@Before
public void setUp() throws Exception {
    super.setUp();

    // create and put .hiverc sample file to default directory
    initFile = File.createTempFile("test", "hive");
    tmpDir = initFile.getParentFile().getAbsoluteFile() + File.separator + "TestSessionGlobalInitFile";
    initFile.delete();
    FileUtils.deleteDirectory(new File(tmpDir));

    initFile = new File(tmpDir + File.separator + SessionManager.HIVERCFILE);
    initFile.getParentFile().mkdirs();
    initFile.createNewFile();

    String[] fileContent = new String[] { "-- global init hive file for test", "set a=1;", "set hiveconf:b=1;",
            "set hivevar:c=1;", "set d\\", "      =1;", "add jar " + initFile.getAbsolutePath() };
    FileUtils.writeLines(initFile, Arrays.asList(fileContent));

    // set up service and client
    hiveConf = new HiveConf();
    hiveConf.setVar(HiveConf.ConfVars.HIVE_SERVER2_GLOBAL_INIT_FILE_LOCATION,
            initFile.getParentFile().getAbsolutePath());
    hiveConf.setVar(HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER,
            "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory");
    service = new FakeEmbeddedThriftBinaryCLIService(hiveConf);
    service.init(new HiveConf());
    client = new ThriftCLIServiceClient(service);
}

From source file: org.apache.hupa.server.guice.ServerModulTest.java

@Test
public void testLoadDemoProperties() throws Exception {
    File tmp = File.createTempFile("foo", ".properties");
    tmp.deleteOnExit();
    Properties p = MockConstants.mockProperties;
    Collection<String> lines = new ArrayList<String>();
    for (Entry<Object, Object> e : p.entrySet()) {
        lines.add(e.getKey() + " = " + e.getValue());
    }
    FileUtils.writeLines(tmp, lines);

    System.setProperty(GuiceListener.SYS_PROP_CONFIG_FILE, tmp.getAbsolutePath());
    p = new GuiceListener().loadProperties();
    Assert.assertNotNull(p);
    Assert.assertEquals(MockConstants.mockSettings.getInboxFolderName(), p.get("DefaultInboxFolder"));
    Assert.assertEquals(MockConstants.mockSettings.getTrashFolderName(), p.get("DefaultTrashFolder"));
    Assert.assertEquals(MockConstants.mockSettings.getSentFolderName(), p.get("DefaultSentFolder"));
    System.clearProperty(GuiceListener.SYS_PROP_CONFIG_FILE);
}

From source file: org.apache.ignite.internal.client.impl.ClientPropertiesConfigurationSelfTest.java

/**
 * Uncomment properties.
 *
 * @param url Source to uncomment client properties for.
 * @return Temporary file with uncommented client properties.
 * @throws IOException In case of IO exception.
 */
private File uncommentProperties(URL url) throws IOException {
    InputStream in = url.openStream();

    assertNotNull(in);

    LineIterator it = IOUtils.lineIterator(in, "UTF-8");
    Collection<String> lines = new ArrayList<>();

    while (it.hasNext())
        lines.add(it.nextLine().replace("#ignite.client.", "ignite.client."));

    IgniteUtils.closeQuiet(in);

    File tmp = File.createTempFile(UUID.randomUUID().toString(), "properties");

    tmp.deleteOnExit();

    FileUtils.writeLines(tmp, lines);

    return tmp;
}

From source file: org.apache.lens.driver.hive.TestRemoteHiveDriver.java

/**
 * Creates the partitioned table.
 *
 * @param tableName  the table name
 * @param partitions the partitions
 * @throws Exception the exception
 */
private void createPartitionedTable(String tableName, int partitions) throws Exception {
    queryConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
    queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);

    QueryContext ctx = createContext("CREATE EXTERNAL TABLE IF NOT EXISTS " + tableName
            + " (ID STRING) PARTITIONED BY (DT STRING, ET STRING)", queryConf);

    driver.execute(ctx);
    Assert.assertEquals(0, driver.getHiveHandleSize());

    File dataDir = new File("target/partdata");
    dataDir.mkdir();

    // Add partitions
    for (int i = 0; i < partitions; i++) {
        // Create partition paths
        File tableDir = new File(dataDir, tableName);
        tableDir.mkdir();
        File partDir = new File(tableDir, "p" + i);
        partDir.mkdir();

        // Create data file
        File data = new File(partDir, "data.data");
        FileUtils.writeLines(data, Arrays.asList("one", "two", "three", "four", "five"));

        System.out.println("@@ Adding partition " + i);
        QueryContext partCtx = createContext("ALTER TABLE " + tableName + " ADD IF NOT EXISTS PARTITION (DT='p"
                + i + "', ET='1') LOCATION '" + partDir.getPath() + "'", queryConf);
        driver.execute(partCtx);
    }
}

From source file: org.apache.lens.server.LensServerTestUtil.java

public static void createTestDatabaseResources(String[] testDatabases, HiveConf conf) throws Exception {
    File srcJarDir = new File("target/testjars/");
    if (!srcJarDir.exists()) {
        // nothing to setup
        return;
    }
    File resDir = new File("target/resources");
    if (!resDir.exists()) {
        resDir.mkdir();
    }

    // Create databases and resource dirs
    Hive hive = Hive.get(conf);
    File testJarFile = new File("target/testjars/test.jar");
    File serdeJarFile = new File("target/testjars/serde.jar");
    for (String db : testDatabases) {
        Database database = new Database();
        database.setName(db);
        hive.createDatabase(database, true);
        File dbDir = new File(resDir, db);
        if (!dbDir.exists()) {
            dbDir.mkdir();
        }
        // Add a jar in the directory
        try {

            String[] jarOrder = { "x_" + db + ".jar", "y_" + db + ".jar", "z_" + db + ".jar", "serde.jar", };

            // Jar order is -> z, y, x, File listing order is x, y, z
            // We are explicitly specifying jar order
            FileUtils.writeLines(new File(dbDir, "jar_order"),
                    Arrays.asList(jarOrder[2], jarOrder[1], jarOrder[0], jarOrder[3]));

            FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[0]));
            FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[1]));
            FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[2]));
            FileUtils.copyFile(serdeJarFile, new File(dbDir, jarOrder[3]));
        } catch (FileNotFoundException fnf) {
            log.error("File not found.", fnf);
        }
    }
}