Example usage for org.apache.hadoop.fs.s3a Constants ENDPOINT

List of usage examples for org.apache.hadoop.fs.s3a Constants ENDPOINT

Introduction

In this page you can find the example usage for org.apache.hadoop.fs.s3a Constants ENDPOINT.

Prototype

String ENDPOINT

To view the source code for org.apache.hadoop.fs.s3a Constants ENDPOINT, click the Source Link.

Click Source Link

Usage

From source file: org.apache.carbondata.examples.sdk.SDKS3Example.java

License: Apache License

/**
 * Demonstrates writing CarbonData files to S3 through the SDK and reading them
 * back, first with an equality filter and then without one.
 *
 * Required args: access-key, secret-key, s3-endpoint.
 * Optional args: table path on S3 (default "s3a://sdk/WriterOutput"),
 * rows per write (default 3), number of writes (default 3).
 *
 * @param args command-line arguments as described above
 * @throws Exception if writing to or reading from the store fails
 */
public static void main(String[] args) throws Exception {
    Logger logger = LogServiceFactory.getLogService(SDKS3Example.class.getName());
    if (args == null || args.length < 3) {
        // FIX: restore the missing space before "<s3-endpoint>" in the usage text.
        logger.error("Usage: java CarbonS3Example: <access-key> <secret-key> "
                + "<s3-endpoint> [table-path-on-s3] [rows] [Number of writes]");
        // FIX: a usage error is a failure; exit with a non-zero status, not 0.
        System.exit(1);
    }

    // Remember the current direct-write setting so it can be restored afterwards.
    String backupProperty = CarbonProperties.getInstance().getProperty(
            CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH,
            CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH_DEFAULT);
    CarbonProperties.getInstance()
            .addProperty(CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH, "true");

    // FIX: restore the saved property even if the write/read path throws.
    try {
        String path = "s3a://sdk/WriterOutput";
        if (args.length > 3) {
            path = args[3];
        }

        int rows = 3;
        if (args.length > 4) {
            rows = Integer.parseInt(args[4]);
        }
        int num = 3;
        if (args.length > 5) {
            num = Integer.parseInt(args[5]);
        }

        // Hadoop configuration carrying the S3A credentials and endpoint.
        Configuration conf = new Configuration(true);
        conf.set(Constants.ACCESS_KEY, args[0]);
        conf.set(Constants.SECRET_KEY, args[1]);
        conf.set(Constants.ENDPOINT, args[2]);

        // Two-column schema: (name STRING, age INT).
        Field[] fields = new Field[2];
        fields[0] = new Field("name", DataTypes.STRING);
        fields[1] = new Field("age", DataTypes.INT);
        // Perform 'num' independent writes of 'rows' rows each.
        for (int j = 0; j < num; j++) {
            CarbonWriter writer = CarbonWriter.builder().outputPath(path).withHadoopConf(conf)
                    .withCsvInput(new Schema(fields)).writtenBy("SDKS3Example").build();

            for (int i = 0; i < rows; i++) {
                writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
            }
            writer.close();
        }

        // Read back with a filter: name == "robot1".
        EqualToExpression equalToExpression = new EqualToExpression(
                new ColumnExpression("name", DataTypes.STRING),
                new LiteralExpression("robot1", DataTypes.STRING));

        CarbonReader reader = CarbonReader.builder(path, "_temp").projection(new String[] { "name", "age" })
                .filter(equalToExpression).withHadoopConf(conf).build();

        System.out.println("\nData:");
        int i = 0;
        // Print at most 20 matching rows.
        while (i < 20 && reader.hasNext()) {
            Object[] row = (Object[]) reader.readNextRow();
            System.out.println(row[0] + " " + row[1]);
            i++;
        }
        System.out.println("\nFinished");
        reader.close();

        // Read without a filter.
        // FIX: qualify the S3A keys with Constants for consistency with the writer path above.
        CarbonReader reader2 = CarbonReader.builder(path, "_temp").projection(new String[] { "name", "age" })
                .withHadoopConf(Constants.ACCESS_KEY, args[0]).withHadoopConf(Constants.SECRET_KEY, args[1])
                .withHadoopConf(Constants.ENDPOINT, args[2]).build();

        System.out.println("\nData:");
        i = 0;
        while (i < 20 && reader2.hasNext()) {
            Object[] row = (Object[]) reader2.readNextRow();
            System.out.println(row[0] + " " + row[1]);
            i++;
        }
        System.out.println("\nFinished");
        reader2.close();
    } finally {
        CarbonProperties.getInstance().addProperty(
                CarbonLoadOptionConstants.ENABLE_CARBON_LOAD_DIRECT_WRITE_TO_STORE_PATH, backupProperty);
    }
}