Example usage for org.apache.hadoop.conf Configuration addResource

Introduction

This page lists example usages of org.apache.hadoop.conf.Configuration#addResource, collected from open source projects.

Prototype

public void addResource(Configuration conf) 

Document

Add a configuration resource. Properties of a later resource override those of previously added resources, unless they were marked final. Besides this Configuration overload, addResource also accepts a classpath resource name (String), a Path, a URL, or an InputStream; the examples below use several of these.
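
For orientation, here is a minimal, self-contained sketch of the pattern most examples below share. The /etc/hadoop/conf paths and the fs.defaultFS lookup are illustrative placeholders, not taken from any project on this page:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;

public class AddResourceExample {
    public static void main(String[] args) {
        // Start from an empty Configuration (loadDefaults = false) so that
        // only the explicitly added resources contribute properties
        Configuration base = new Configuration(false);
        base.addResource(new Path("/etc/hadoop/conf/core-site.xml"));
        base.addResource(new Path("/etc/hadoop/conf/hdfs-site.xml"));

        // The addResource(Configuration) overload documented above merges
        // the resources of another Configuration into this one
        Configuration conf = new Configuration(false);
        conf.addResource(base);

        System.out.println(conf.get("fs.defaultFS"));
    }
}

Because later resources override earlier ones (final properties excepted), the order of addResource calls matters.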

Usage

From source file:FlinkBootstrap.java

License:Apache License

public static void main(String[] args) throws Exception {

    if (args.length != 2) {
        throw new IllegalArgumentException(
                "Provide `TaskManager` or `JobManager` parameter with config folder");
    }

    //Load the Hadoop S3 wrapper classes up front; without this a ClassNotFoundException is thrown later
    Class.forName("org.apache.flink.runtime.fs.hdfs.HadoopFileSystem");
    Class.forName("org.apache.hadoop.fs.s3a.S3AFileSystem");

    //Verify S3 is accessible by listing a bucket
    Configuration conf = new Configuration();
    conf.addResource(new Path("config/hadoop/core-site.xml"));
    conf.addResource(new Path("config/hadoop/hdfs-site.xml"));
    FileSystem fs = FileSystem.get(conf);
    fs.listStatus(new Path("s3://dir"));

    if (args[0].equals("TaskManager")) {
        TaskManager.main(new String[] { "--configDir", args[1], });
    } else if (args[0].equals("JobManager")) {
        JobManager.main(new String[] { "--configDir", args[1], "--executionMode", "cluster", });
    } else {
        throw new IllegalArgumentException("Unknown parameter `" + args[0] + "`");
    }
}

From source file:ConfTest.java

License:Open Source License

public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.addResource(new Path("d:\\test\\a.xml"));
    System.out.println(conf.get("aaa"));
}

From source file:JavaCustomReceiver.java

License:Apache License

/** Create a socket connection and receive data until receiver is stopped */
private void receive() {
    Socket socket = null;
    String userInput = null;

    try {
        // connect to the server
        socket = new Socket(host, port);

        Path pt = new Path("hdfs://192.168.0.1:9000/user/hduser/equinox-sanjose.20120119-netflow.txt");

        Configuration conf = new Configuration();
        conf.addResource(new Path("/usr/local/hadoop/conf/core-site.xml"));
        conf.addResource(new Path("/usr/local/hadoop/conf/hdfs-site.xml"));
        FileSystem fs = pt.getFileSystem(conf);
        System.out.println(fs.getHomeDirectory());
        BufferedReader in = new BufferedReader(new InputStreamReader(fs.open(pt)));

        // Until stopped or connection broken continue reading
        while (!isStopped() && (userInput = in.readLine()) != null) {
            System.out.println("Received data '" + userInput + "'");
            store(userInput);
        }
        in.close();
        socket.close();

        // Restart in an attempt to connect again when server is active again
        restart("Trying to connect again");
    } catch (ConnectException ce) {
        // restart if could not connect to server
        restart("Could not connect", ce);
    } catch (Throwable t) {
        restart("Error receiving data", t);
    }
}

From source file:TestFSConfig.java

License:Open Source License

public static void main(String[] argv) {
    Configuration conf = new Configuration();
    conf.addResource(ConstVar.FormatStorageConf);

    int segmentSize = conf.getInt(ConstVar.ConfSegmentSize, -1);
    int unitSize = conf.getInt(ConstVar.ConfUnitSize, -2);
    int poolSize = conf.getInt(ConstVar.ConfPoolSize, -3);

    System.out.println("seg:" + segmentSize + ",unit:" + unitSize + ",pool:" + poolSize);
}

From source file:$.ApplicationTest.java

@Test
public void testApplication() throws IOException, Exception {
    try {
        LocalMode lma = LocalMode.newInstance();
        Configuration conf = new Configuration(false);
        conf.addResource(this.getClass().getResourceAsStream("/META-INF/properties.xml"));
        lma.prepareDAG(new Application(), conf);
        LocalMode.Controller lc = lma.getController();
        lc.run(10000); // runs for 10 seconds and quits
    } catch (ConstraintViolationException e) {
        Assert.fail("constraint violations: " + e.getConstraintViolations());
    }
}

From source file:azkaban.AzkabanCommonModule.java

License:Apache License

@Inject
@Provides
@Singleton
public Configuration createHadoopConfiguration() {
    final String hadoopConfDirPath = requireNonNull(this.props.get(HADOOP_CONF_DIR_PATH));

    final File hadoopConfDir = new File(requireNonNull(hadoopConfDirPath));
    checkArgument(hadoopConfDir.exists() && hadoopConfDir.isDirectory());

    final Configuration hadoopConf = new Configuration(false);
    hadoopConf.addResource(new org.apache.hadoop.fs.Path(hadoopConfDirPath, "core-site.xml"));
    hadoopConf.addResource(new org.apache.hadoop.fs.Path(hadoopConfDirPath, "hdfs-site.xml"));
    hadoopConf.set("fs.hdfs.impl", org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
    return hadoopConf;
}

From source file:azkaban.jobtype.StatsUtils.java

License:Apache License

public static Properties getJobConf(RunningJob runningJob) {
    try {
        Path path = new Path(runningJob.getJobFile());
        Configuration conf = new Configuration(false);
        FileSystem fs = FileSystem.get(new Configuration());
        InputStream in = fs.open(path);
        conf.addResource(in);
        return getJobConf(conf);
    } catch (FileNotFoundException e) {
        logger.warn("Job conf not found.");
    } catch (IOException e) {
        logger.warn("Error while retrieving job conf: " + e.getMessage());
    }
    return null;
}

From source file:br.ufrj.nce.recureco.distributedindex.search.controller.DocumentViewerServlet.java

License:Open Source License

protected void doGet(javax.servlet.http.HttpServletRequest request,
        javax.servlet.http.HttpServletResponse response) throws javax.servlet.ServletException, IOException {

    String doc = request.getParameter("doc");

    if (doc != null && doc.trim().length() > 0) {

        try {

            String filePath = DIR_DOWNLOAD + doc;

            Configuration conf = new Configuration();

            conf.addResource(new Path(DIR_HADOOP_CONF + "core-site.xml"));
            conf.addResource(new Path(DIR_HADOOP_CONF + "hdfs-site.xml"));
            conf.addResource(new Path(DIR_HADOOP_CONF + "mapred-site.xml"));

            FileSystem fileSystem = FileSystem.get(conf);

            Path path = new Path(filePath);
            if (!fileSystem.exists(path)) {
                response.getWriter().print("File not found.");
                return;
            }

            FSDataInputStream in = fileSystem.open(path);

            response.setContentType("text/plain");

            int read = 0;
            byte[] bytes = new byte[BYTES_DOWNLOAD];
            OutputStream os = response.getOutputStream();

            while ((read = in.read(bytes)) != -1) {
                os.write(bytes, 0, read);
            }
            os.flush();
            os.close();
        } catch (FileNotFoundException e) {
            response.getWriter().print("File not found.");
        }

    } else {
        //print invalid document
        response.getWriter().print("File not informed.");
    }

}

From source file:cn.edu.hfut.dmic.webcollector.util.CrawlerConfiguration.java

public static Configuration create() {
    Configuration conf = new Configuration();
    conf.addResource("crawler-default.xml");
    conf.addResource("hadoop/core-site.xml");
    conf.addResource("hadoop/hdfs-site.xml");
    conf.addResource("hadoop/mapred-site.xml");

    //conf.set("mapred.jar", "/home/hu/mygit/WebCollector2/WebCollectorCluster/target/WebCollectorCluster-2.0.jar");
    return conf;
}

From source file:cn.edu.hfut.dmic.webcollectorcluster.util.CrawlerConfiguration.java

public static Configuration create() {
    Configuration conf = new Configuration();
    conf.addResource("crawler-default.xml");
    conf.addResource("hadoop/core-site.xml");
    conf.addResource("hadoop/hdfs-site.xml");
    conf.addResource("hadoop/mapred-site.xml");
    //conf.set("mapred.jar", "/home/hu/mygit/WebCollector2/WebCollectorCluster/target/WebCollectorCluster-2.0.jar");
    return conf;
}