Example usage for org.apache.commons.lang RandomStringUtils randomAlphabetic

List of usage examples for org.apache.commons.lang RandomStringUtils randomAlphabetic

Introduction

On this page you can find example usages of org.apache.commons.lang RandomStringUtils randomAlphabetic.

Prototype

public static String randomAlphabetic(int count) 

Source Link

Document

Creates a random string whose length is the number of characters specified.

Characters will be chosen from the set of alphabetic characters.

Usage

From source file:org.apache.falcon.resource.TestContext.java

/**
 * Prepares an embedded test cluster and its backing HDFS layout for a test run.
 *
 * @param clusterTemplate template file used to materialize the cluster entity
 * @param disableLineage  when true, strips the metadata mapping service from the
 *                        configured application services so lineage is not recorded
 * @throws Exception on any setup failure
 */
public static void prepare(String clusterTemplate, boolean disableLineage) throws Exception {
    // setup a logged in user
    CurrentUser.authenticate(REMOTE_USER);

    if (disableLineage) {
        // disable recording lineage metadata by removing the service from the list
        String services = StartupProperties.get().getProperty("application.services");
        StartupProperties.get().setProperty("application.services",
                services.replace("org.apache.falcon.metadata.MetadataMappingService", ""));
    }

    // randomize the cluster name so repeated runs do not collide
    Map<String, String> overlay = new HashMap<String, String>();
    overlay.put("cluster", RandomStringUtils.randomAlphabetic(5));
    overlay.put("colo", DeploymentUtil.getCurrentColo());
    TestContext.overlayParametersOverTemplate(clusterTemplate, overlay);
    EmbeddedCluster cluster = EmbeddedCluster.newCluster(overlay.get("cluster"), true);

    cleanupStore();

    // setup dependent workflow and lib path in hdfs
    FileSystem fs = FileSystem.get(cluster.getConf());
    mkdir(fs, new Path("/falcon"), new FsPermission((short) 511)); // 511 == 0777

    Path wfParent = new Path("/falcon/test");
    fs.delete(wfParent, true);
    Path wfPath = new Path(wfParent, "workflow");
    mkdir(fs, wfPath);
    mkdir(fs, new Path("/falcon/test/workflow/lib"));
    // ship the workflow definition bundled with the test resources
    fs.copyFromLocalFile(false, true, new Path(TestContext.class.getResource("/fs-workflow.xml").getPath()),
            new Path(wfPath, "workflow.xml"));
    mkdir(fs, new Path(wfParent, "input/2012/04/20/00"));
    Path outPath = new Path(wfParent, "output");
    mkdir(fs, outPath, new FsPermission((short) 511)); // 511 == 0777

    // init cluster locations
    initClusterLocations(cluster, fs);
}

From source file:org.apache.flume.channel.kafka.TestKafkaChannel.java

/**
 * Per-test setup: picks a Kafka topic name not used by any previous test,
 * creates it (best-effort), and initializes the completion latch.
 */
@Before
public void setup() throws Exception {
    // Generate random topic names until we find one this test class has not
    // used yet, so tests never share a topic's backlog.
    boolean topicFound = false;
    while (!topicFound) {
        topic = RandomStringUtils.randomAlphabetic(8);
        if (!usedTopics.contains(topic)) {
            usedTopics.add(topic);
            topicFound = true;
        }
    }
    try {
        createTopic(topic);
    } catch (Exception ignored) {
        // Best-effort: topic creation can fail (e.g. the topic already exists
        // on the broker); the test proceeds either way. NOTE(review): consider
        // logging the exception instead of discarding it entirely.
    }
    // Give the broker time to propagate the new topic's metadata.
    // NOTE(review): a fixed sleep is flaky — presumably polling for topic
    // readiness would be more robust; confirm broker APIs available here.
    Thread.sleep(2500);
    latch = new CountDownLatch(5);
}

From source file:org.apache.flume.channel.kafka.TestKafkaChannel.java

/**
 * Writes 50 raw messages straight to the topic and verifies the channel
 * delivers each body ("0".."49") exactly once when not parsing as Flume events.
 */
@Test
public void testNoParsingAsFlumeAgent() throws Exception {
    final KafkaChannel channel = startChannel(false);
    Producer<String, byte[]> producer = new Producer<String, byte[]>(
            new ProducerConfig(channel.getKafkaConf()));

    // Produce 50 keyed messages whose bodies are the strings "0" through "49".
    List<KeyedMessage<String, byte[]>> messages = Lists.newArrayList();
    for (int msgNum = 0; msgNum < 50; msgNum++) {
        messages.add(new KeyedMessage<String, byte[]>(topic, null,
                RandomStringUtils.randomAlphabetic(6), String.valueOf(msgNum).getBytes()));
    }
    producer.send(messages);

    ExecutorCompletionService<Void> completionSvc = new ExecutorCompletionService<Void>(
            Executors.newCachedThreadPool());
    List<Event> events = pullEvents(channel, completionSvc, 50, false, false);
    wait(completionSvc, 5);

    // Collect the integer bodies that came back through the channel.
    Set<Integer> received = Sets.newHashSet();
    for (int idx = 0; idx < 50; idx++) {
        received.add(Integer.parseInt(new String(events.get(idx).getBody())));
    }
    // Every value 0..49 must be present; removing them all must empty the set,
    // proving there were no duplicates or strays.
    for (int expected = 0; expected < 50; expected++) {
        Assert.assertTrue(received.contains(expected));
        received.remove(expected);
    }
    Assert.assertTrue(received.isEmpty());
    channel.stop();
}

From source file:org.apache.flume.sink.kafka.TestKafkaSink.java

/**
 * Returns a random 8-letter topic name that no prior test in this class has
 * used, recording it so later calls never return the same name.
 */
public String findUnusedTopic() {
    String candidate;
    do {
        candidate = RandomStringUtils.randomAlphabetic(8);
    } while (usedTopics.contains(candidate));
    usedTopics.add(candidate);
    return candidate;
}

From source file:org.apache.gobblin.data.management.copy.writer.FileAwareInputStreamDataWriterTest.java

/**
 * End-to-end check of the GPG symmetric-encryption path: writes a plaintext
 * stream through the writer configured with a passphrase, then verifies the
 * file on disk is encrypted (".gpg" suffix, ciphertext differs from plaintext)
 * and decrypts back to the original bytes.
 */
@Test
public void testWriteWithGPGSymmetricEncryption() throws Exception {
    // plaintext payload pushed through the encrypting writer
    byte[] streamString = "testEncryptedContents".getBytes("UTF-8");

    // destination file inherits owner/group from the temp dir, rwx for all
    FileStatus status = fs.getFileStatus(testTempPath);
    OwnerAndPermission ownerAndPermission = new OwnerAndPermission(status.getOwner(), status.getGroup(),
            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
    CopyableFile cf = CopyableFileUtils.getTestCopyableFile(ownerAndPermission);

    CopyableDatasetMetadata metadata = new CopyableDatasetMetadata(
            new TestCopyableDataset(new Path("/source")));

    // configure a work unit with symmetric GPG encryption (passphrase only)
    WorkUnitState state = TestUtils.createTestWorkUnitState();
    state.setProp(ConfigurationKeys.WRITER_STAGING_DIR, new Path(testTempPath, "staging").toString());
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, new Path(testTempPath, "output").toString());
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH, RandomStringUtils.randomAlphabetic(5));
    state.setProp("writer.encrypt." + EncryptionConfigParser.ENCRYPTION_ALGORITHM_KEY, "gpg");
    state.setProp("writer.encrypt." + EncryptionConfigParser.ENCRYPTION_KEYSTORE_PASSWORD_KEY, "testPassword");

    CopySource.serializeCopyEntity(state, cf);
    CopySource.serializeCopyableDataset(state, metadata);

    FileAwareInputStreamDataWriter dataWriter = new FileAwareInputStreamDataWriter(state, 1, 0);

    FileAwareInputStream fileAwareInputStream = new FileAwareInputStream(cf,
            StreamUtils.convertStream(new ByteArrayInputStream(streamString)));
    dataWriter.write(fileAwareInputStream);
    dataWriter.commit();

    // locate the committed file under the output dir for this dataset/partition
    Path writtenFilePath = new Path(new Path(state.getProp(ConfigurationKeys.WRITER_OUTPUT_DIR),
            cf.getDatasetAndPartition(metadata).identifier()), cf.getDestination());
    Assert.assertTrue(writtenFilePath.getName().endsWith("gpg"),
            "Expected encryption name to be appended to destination");
    byte[] encryptedContent = IOUtils.toByteArray(new FileInputStream(writtenFilePath.toString()));
    byte[] decryptedContent = new byte[streamString.length];
    IOUtils.readFully(
            GPGFileDecryptor.decryptFile(new FileInputStream(writtenFilePath.toString()), "testPassword"),
            decryptedContent);

    // encrypted string should not be the same as the plaintext
    Assert.assertNotEquals(encryptedContent, streamString);

    // decrypted string should be the same as the plaintext
    Assert.assertEquals(decryptedContent, streamString);

}

From source file:org.apache.gobblin.data.management.copy.writer.FileAwareInputStreamDataWriterTest.java

/**
 * End-to-end check of the GPG asymmetric-encryption path: encrypts with a
 * public key from test resources, then verifies the written file is encrypted
 * (".gpg" suffix, ciphertext differs from plaintext) and decrypts back to the
 * original bytes using the matching private key.
 */
@Test
public void testWriteWithGPGAsymmetricEncryption() throws Exception {
    // plaintext payload pushed through the encrypting writer
    byte[] streamString = "testEncryptedContents".getBytes("UTF-8");

    // destination file inherits owner/group from the temp dir, rwx for all
    FileStatus status = fs.getFileStatus(testTempPath);
    OwnerAndPermission ownerAndPermission = new OwnerAndPermission(status.getOwner(), status.getGroup(),
            new FsPermission(FsAction.ALL, FsAction.ALL, FsAction.ALL));
    CopyableFile cf = CopyableFileUtils.getTestCopyableFile(ownerAndPermission);

    CopyableDatasetMetadata metadata = new CopyableDatasetMetadata(
            new TestCopyableDataset(new Path("/source")));

    WorkUnitState state = TestUtils.createTestWorkUnitState();
    state.setProp(ConfigurationKeys.WRITER_STAGING_DIR, new Path(testTempPath, "staging").toString());
    state.setProp(ConfigurationKeys.WRITER_OUTPUT_DIR, new Path(testTempPath, "output").toString());
    state.setProp(ConfigurationKeys.WRITER_FILE_PATH, RandomStringUtils.randomAlphabetic(5));
    state.setProp("writer.encrypt." + EncryptionConfigParser.ENCRYPTION_ALGORITHM_KEY, "gpg");

    // materialize the test public key on local disk so the writer can load it
    File publicKeyFile = new File(testTempPath.toString(), "public.key");

    FileUtils.copyInputStreamToFile(GPGFileEncryptor.class.getResourceAsStream(GPGFileEncryptorTest.PUBLIC_KEY),
            publicKeyFile);

    // point the writer at the public key / passphrase / key id
    state.setProp("writer.encrypt." + EncryptionConfigParser.ENCRYPTION_KEYSTORE_PATH_KEY,
            publicKeyFile.getAbsolutePath());
    state.setProp("writer.encrypt." + EncryptionConfigParser.ENCRYPTION_KEYSTORE_PASSWORD_KEY,
            GPGFileEncryptorTest.PASSPHRASE);
    state.setProp("writer.encrypt." + EncryptionConfigParser.ENCRYPTION_KEY_NAME, GPGFileEncryptorTest.KEY_ID);

    CopySource.serializeCopyEntity(state, cf);
    CopySource.serializeCopyableDataset(state, metadata);

    FileAwareInputStreamDataWriter dataWriter = new FileAwareInputStreamDataWriter(state, 1, 0);

    FileAwareInputStream fileAwareInputStream = new FileAwareInputStream(cf,
            StreamUtils.convertStream(new ByteArrayInputStream(streamString)));
    dataWriter.write(fileAwareInputStream);
    dataWriter.commit();

    // locate the committed file under the output dir for this dataset/partition
    Path writtenFilePath = new Path(new Path(state.getProp(ConfigurationKeys.WRITER_OUTPUT_DIR),
            cf.getDatasetAndPartition(metadata).identifier()), cf.getDestination());
    Assert.assertTrue(writtenFilePath.getName().endsWith("gpg"),
            "Expected encryption name to be appended to destination");
    byte[] encryptedContent = IOUtils.toByteArray(new FileInputStream(writtenFilePath.toString()));
    byte[] decryptedContent = new byte[streamString.length];
    // decrypt with the private key that pairs with the public key used above
    IOUtils.readFully(GPGFileDecryptor.decryptFile(new FileInputStream(writtenFilePath.toString()),
            GPGFileEncryptor.class.getResourceAsStream(GPGFileEncryptorTest.PRIVATE_KEY),
            GPGFileEncryptorTest.PASSPHRASE), decryptedContent);

    // encrypted string should not be the same as the plaintext
    Assert.assertNotEquals(encryptedContent, streamString);

    // decrypted string should be the same as the plaintext
    Assert.assertEquals(decryptedContent, streamString);

}

From source file:org.apache.hadoop.fs.azure.TestNativeAzureFileSystemAppend.java

/**
 * Produces {@code size} bytes of random alphabetic test data.
 *
 * <p>randomAlphabetic yields only ASCII letters, so UTF-8 encodes each character
 * as exactly one byte and the resulting array already has length {@code size};
 * the previous arraycopy into a fresh same-sized array was redundant. The
 * charset is given explicitly to avoid depending on the platform default.
 *
 * @param size number of bytes (and characters) to generate
 * @return a freshly allocated array of {@code size} ASCII letter bytes
 */
private static byte[] getTestData(int size) {
    return RandomStringUtils.randomAlphabetic(size).getBytes(java.nio.charset.StandardCharsets.UTF_8);
}

From source file:org.apache.hadoop.hbase.chaos.actions.AddColumnAction.java

/**
 * Chaos action: adds a new, randomly named column family to the target table.
 * Keeps drawing 5-letter random names until one does not clash with an
 * existing family, then pushes the modified descriptor to the admin.
 */
@Override
public void perform() throws Exception {
    HTableDescriptor tableDescriptor = admin.getTableDescriptor(tableName);

    // Retry until the random family name is not already present in the table.
    HColumnDescriptor newFamily;
    do {
        newFamily = new HColumnDescriptor(RandomStringUtils.randomAlphabetic(5));
    } while (tableDescriptor.getFamily(newFamily.getName()) != null);

    LOG.debug("Performing action: Adding " + newFamily + " to " + tableNameString);

    tableDescriptor.addFamily(newFamily);
    admin.modifyTable(tableName, tableDescriptor);
}

From source file:org.apache.hadoop.hbase.regionserver.TestTimestampFilterSeekHint.java

/**
 * Builds a fresh test region (uniquely named via the shared counter) and loads
 * one 255-character random value per timestamp 0..MAX_VERSIONS-3, flushing so
 * the data is on disk before the seek-hint assertions run.
 */
@Before
public void prepareRegion() throws IOException {
    region = TEST_UTIL.createTestRegion("TestTimestampFilterSeekHint" + regionCount++,
            new HColumnDescriptor(FAMILY).setBlocksize(1024).setMaxVersions(MAX_VERSIONS));

    // One put per timestamp; the timestamp doubles as the loop counter.
    for (long ts = 0; ts < MAX_VERSIONS - 2; ts++) {
        Put put = new Put(RK_BYTES, ts);
        put.addColumn(FAMILY_BYTES, QUAL_BYTES, Bytes.toBytes(RandomStringUtils.randomAlphabetic(255)));
        region.put(put);
    }
    // Flush the memstore so reads hit HFiles, not just in-memory data.
    region.flush(true);
}

From source file:org.apache.hadoop.hdfs.protocol.datatransfer.sasl.SaslDataTransferTestCase.java

/**
 * Class-level setup: starts an embedded MiniKdc under a clean per-class
 * directory and creates the keytabs/principals the SASL data-transfer tests
 * use — a randomly named regular user, the "hdfs" superuser, and the SPNEGO
 * HTTP principal.
 */
@BeforeClass
public static void initKdc() throws Exception {
    // Work in a directory named after this test class; wipe any leftovers.
    baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
            SaslDataTransferTestCase.class.getSimpleName());
    FileUtil.fullyDelete(baseDir);
    assertTrue(baseDir.mkdirs());

    // Boot the embedded KDC.
    Properties kdcProperties = MiniKdc.createConf();
    kdc = new MiniKdc(kdcProperties, baseDir);
    kdc.start();

    // Regular user: random name so reruns never collide on principals.
    String user = RandomStringUtils.randomAlphabetic(8);
    File keytabForUser = new File(baseDir, user + ".keytab");
    userKeyTab = keytabForUser.getAbsolutePath();
    kdc.createPrincipal(keytabForUser, user + "/localhost");
    userPrincipal = user + "/localhost@" + kdc.getRealm();

    // Superuser "hdfs" plus the SPNEGO HTTP principal in the same keytab.
    String superUser = "hdfs";
    File keytabForHdfs = new File(baseDir, superUser + ".keytab");
    hdfsKeytab = keytabForHdfs.getAbsolutePath();
    kdc.createPrincipal(keytabForHdfs, superUser + "/localhost", "HTTP/localhost");
    hdfsPrincipal = superUser + "/localhost@" + kdc.getRealm();
    spnegoPrincipal = "HTTP/localhost@" + kdc.getRealm();
}