Example usage for java.lang.ThreadGroup ThreadGroup(String name)

Introduction

On this page you can find example usage of the java.lang.ThreadGroup constructor ThreadGroup(String name). In the ThreadGroupDemo snippets below, the enclosing class is assumed to implement Runnable, since each instance is passed to a Thread constructor as the target.

Prototype

public ThreadGroup(String name) 

Document

Constructs a new thread group. The parent of this new group is the thread group of the currently running thread.
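
Before the longer samples below, here is a minimal, self-contained sketch (class and variable names are illustrative, not taken from the samples) showing the constructor in isolation: it creates a named group whose parent is the group of the currently running thread, and starts one thread inside it.

public class ThreadGroupNameDemo {
    public static void main(String[] args) throws InterruptedException {
        // Create a named group; its parent is the current thread's group.
        ThreadGroup workers = new ThreadGroup("workers");

        // Start a thread inside the group and report which group it runs in.
        Thread t = new Thread(workers, () -> System.out
                .println("Running in group: " + Thread.currentThread().getThreadGroup().getName()));
        t.start();
        t.join();

        System.out.println("Group: " + workers.getName() + ", parent: " + workers.getParent().getName());
    }
}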

Usage

From source file:Main.java

public ThreadGroupDemo() {
    ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");
    ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

    Thread t1 = new Thread(pGroup, this);
    System.out.println("Starting " + t1.getName());
    t1.start();

    Thread t2 = new Thread(cGroup, this);
    System.out.println("Starting " + t2.getName());
    t2.start();

    ThreadGroup[] grpList = new ThreadGroup[pGroup.activeGroupCount()];
    int count = pGroup.enumerate(grpList, true);
    for (int i = 0; i < count; i++) {
        System.out.println("ThreadGroup" + grpList[i].getName() + " found");
    }
}

From source file:Main.java

public ThreadGroupDemo() {
    ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");

    ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

    Thread t1 = new Thread(pGroup, this);
    System.out.println("Starting " + t1.getName());
    t1.start();

    Thread t2 = new Thread(cGroup, this);
    System.out.println("Starting " + t2.getName());
    t2.start();

    System.out.println("Listing parentThreadGroup: " + pGroup.getName());
    pGroup.list();

    System.out.println("Listing childThreadGroup : " + cGroup.getName());
    cGroup.list();

}

From source file:Main.java

public ThreadGroupDemo() {
    ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");

    ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

    Thread t1 = new Thread(pGroup, this);
    System.out.println("Starting " + t1.getName());
    t1.start();

    Thread t2 = new Thread(cGroup, this);
    System.out.println("Starting " + t2.getName());
    t2.start();

    Thread[] list = new Thread[pGroup.activeCount()];
    int count = pGroup.enumerate(list, true);
    for (int i = 0; i < count; i++) {
        System.out.println("Thread " + list[i].getName() + " found");
    }

}

From source file:Main.java

public ThreadGroupDemo() {
    ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");

    ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

    Thread t1 = new Thread(pGroup, this);
    System.out.println("Starting " + t1.getName());
    t1.start();

    Thread t2 = new Thread(cGroup, this);
    System.out.println("Starting " + t2.getName());
    t2.start();

    // determine which ThreadGroup is parent
    boolean isParent = pGroup.parentOf(cGroup);
    System.out.println(pGroup.getName() + " is the parent of " + cGroup.getName() + "? " + isParent);

    isParent = cGroup.parentOf(pGroup);
    System.out.println(cGroup.getName() + " is the parent of " + pGroup.getName() + "? " + isParent);

}

From source file:Main.java

public ThreadGroupDemo() {

    ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");

    ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

    Thread t1 = new Thread(pGroup, this);
    System.out.println("Starting " + t1.getName());
    t1.start();

    Thread t2 = new Thread(cGroup, this);
    System.out.println("Starting " + t2.getName());
    t2.start();

    Thread[] list = new Thread[pGroup.activeCount()];
    int count = pGroup.enumerate(list);
    for (int i = 0; i < count; i++) {
        System.out.println("Thread " + list[i].getName() + " found");
    }

}

From source file:Main.java

public ThreadGroupDemo() {
    try {
        ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");

        ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

        Thread t1 = new Thread(pGroup, this);
        System.out.println("Starting " + t1.getName());
        t1.start();

        Thread t2 = new Thread(cGroup, this);
        System.out.println("Starting " + t2.getName());
        t2.start();

        pGroup.checkAccess();
        System.out.println(pGroup.getName() + " has access");
        cGroup.checkAccess();
        System.out.println(cGroup.getName() + " has access");
    } catch (Exception ex) {
        System.out.println(ex.toString());
    }
}

From source file:Main.java

public ThreadGroupDemo() {
    try {
        ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");

        ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

        Thread t1 = new Thread(pGroup, this);
        System.out.println("Starting " + t1.getName());
        t1.start();

        Thread t2 = new Thread(cGroup, this);
        System.out.println("Starting " + t2.getName());
        t2.start();

        // block until the other threads finish
        t1.join();
        t2.join();

        // child group destroyed
        cGroup.destroy();
        System.out.println(cGroup.getName() + " destroyed");

        // parent group destroyed
        pGroup.destroy();
        System.out.println(pGroup.getName() + " destroyed");

    } catch (InterruptedException ex) {
        System.out.println(ex.toString());
    }
}

From source file:Main.java

public ThreadGroupDemo() {
    try {
        ThreadGroup pGroup = new ThreadGroup("Parent ThreadGroup");

        ThreadGroup cGroup = new ThreadGroup(pGroup, "Child ThreadGroup");

        Thread t1 = new Thread(pGroup, this);
        System.out.println("Starting " + t1.getName());
        t1.start();

        Thread t2 = new Thread(cGroup, this);
        System.out.println("Starting " + t2.getName());
        t2.start();

        t1.join();
        t2.join();

        if (!cGroup.isDestroyed()) {
            cGroup.destroy();
        } else {
            System.out.println(cGroup.getName() + " destroyed");
        }

        // parent group destroyed
        if (!pGroup.isDestroyed()) {
            pGroup.destroy();
        } else {
            System.out.println(pGroup.getName() + " destroyed");
        }

    } catch (Exception ex) {
        System.out.println(ex.toString());
    }
}

From source file:org.apache.hadoop.cifs.Cifs2HdfsClient.java

public static void main(String[] args) {

    // This handles parsing args.. This is a really crappy implementation. I
    // have a better one I can share from Commons-cli package

    Configuration conf = new Configuration();
    String[] otherArgs = null;
    try {
        otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    } catch (IOException e4) {
        // TODO Auto-generated catch block
        e4.printStackTrace();
    }

    options = new Options();
    options.addOption("cifs_host", true, "CIFS/SMB Server Hostname --cifs_host winfileserver1.nt.example.com");
    options.addOption("cifs_domain", true, "CIFS/SMB Domain --cifs_domain nt.example.com");
    options.addOption("cifs_logonto", true, "CIFS/SMB LogonTo --cifs_logonto windc1nt, hadoopserver");
    options.addOption("cifs_input_folder", true, "CIFS/SMB Server Input Folder --cifs_input_folder M201209 ");
    options.addOption("cifs_output_folder", true,
            "CIFS/SMB Server Output Folder --cifs_output_folder M201209 ");
    options.addOption("cifs_input_file", true, "CIFS/SMB Server Single Input File filename.csv or filename*");
    options.addOption("cifs_userid", true, "CIFS/SMB Domain Userid --cifs_userid usergoeshere");
    options.addOption("cifs_pwd", true, "CIFS/SMB Domain Password --cifs_pwd passwordgoeshere");
    options.addOption("cifs_hadoop_cred_path", true,
            "CIFS Password --cifs_hadoop_cred_path /user/username/credstore.jceks");
    options.addOption("cifs_pwd_alias", true, "CIFS Password Alias --cifs_pwd_alias password.alias");
    options.addOption("transfer_limit", true,
            "# of transfers to execute simultaneously should not transfer Note: 10-15 = optimal --transfer_limit 10");
    options.addOption("max_depth", true, "CIFS ONLY - Max Depth to recurse --max_depth 10");
    options.addOption("ignore_top_folder_files", false, "CIFS ONLY - Ignore Top Level Folder files");
    options.addOption("no_nested_transfer", false, "CIFS ONLY - Do not nest into folders for transfer");
    options.addOption("hdfs_output_folder", true, "HDFS Output Folder --hdfs_output_dir /scm/");
    options.addOption("hdfs_input_folder", true, "HDFS Input Folder --hdfs_input_dir /scm/");
    // options.addOption("hdfs_input_file", true, "HDFS Single Input File
    // filename.csv or filename*");

    options.addOption("krb_keytab", true, "KeyTab File to Connect to HDFS --krb_keytab $HOME/S00000.keytab");
    options.addOption("krb_upn", true,
            "Kerberos Princpial for Keytab to Connect to HDFS --krb_upn S00000@EXAMP.EXAMPLE.COM");
    options.addOption("help", false, "Display help");

    CommandLineParser parser = new CIFSParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, otherArgs);
    } catch (ParseException e2) {
        // TODO Auto-generated catch block
        e2.printStackTrace();
    }
    if (cmd.hasOption("cifs_host") && cmd.hasOption("cifs_domain") && cmd.hasOption("cifs_userid")) {
        cifsHost = cmd.getOptionValue("cifs_host");
        cifsDomain = cmd.getOptionValue("cifs_domain");
        cifsUserId = cmd.getOptionValue("cifs_userid");
        if (cmd.hasOption("cifs_pwd")) {
            cifsPwd = cmd.getOptionValue("cifs_pwd");
        } else if (cmd.hasOption("cifs_pwd_alias") && cmd.hasOption("cifs_hadoop_cred_path")) {
            cifsPwdAlias = cmd.getOptionValue("cifs_pwd_alias");
            cifsPwdCredPath = cmd.getOptionValue("cifs_hadoop_cred_path");
        } else {
            System.out.println("Missing CIFS Password / CIFS Password Alias / CIFS Hadoop Cred Path");
            missingParams();
            System.exit(0);
        }
        if (cmd.hasOption("cifs_logonto")) {
            cifsLogonTo = cmd.getOptionValue("cifs_logonto");

        } else {
            cifsLogonTo = null;
        }
        if (cmd.hasOption("ignore_top_folder_files")) {
            ignoreTopFolder = true;
        }
        if (cmd.hasOption("no_nested_transfer")) {
            noNesting = true;
        }
        if (cmd.hasOption("transfer_limit")) {
            transferLimitTrue = true;
            transferLimit = cmd.getOptionValue("transfer_limit");
        }
        if (cmd.hasOption("max_depth")) {
            maxDepth = Integer.valueOf(cmd.getOptionValue("max_depth"));
        }
        if (cmd.hasOption("hdfs_input_folder") && cmd.hasOption("cifs_output_folder")) {
            hdfsInputFolder = cmd.getOptionValue("hdfs_input_folder");
            cifsOutputFolder = cmd.getOptionValue("cifs_output_folder");
            hdfs2cifs = true;
            // Normalize the CIFS output folder path with leading and trailing "/",
            // mirroring the cifs_input_folder handling below.
            if (!(cifsOutputFolder.startsWith("/"))) {
                cifsOutputFolder = "/" + cifsOutputFolder;
            }
            if (!(cifsOutputFolder.endsWith("/"))) {
                cifsOutputFolder = cifsOutputFolder + "/";
            }
            /*
             * if (cmd.hasOption("hdfs_input_file")) { hdfsInputFile =
             * cmd.getOptionValue("hdfs_input_file"); maxDepth = -1; noNesting = true; }
             */
        }
        if (cmd.hasOption("hdfs_output_folder") && cmd.hasOption("cifs_input_folder")) {
            cifsInputFolder = cmd.getOptionValue("cifs_input_folder");
            if (!(cifsInputFolder.startsWith("/"))) {
                cifsInputFolder = "/" + cifsInputFolder;
            }
            if (!(cifsInputFolder.endsWith("/"))) {
                cifsInputFolder = cifsInputFolder + "/";
            }
            hdfsOutputFolder = cmd.getOptionValue("hdfs_output_folder");
            cifs2hdfs = true;
            if (cmd.hasOption("cifs_input_file")) {
                cifsInputFile = cmd.getOptionValue("cifs_input_file");
                maxDepth = -1;
                noNesting = true;
            }
        }
        if (cifs2hdfs && hdfs2cifs) {
            System.out.println(
                    "Error Cannot specify hdfs_output_folder/hdfs_input_folder or cifs_output_folder/cifs_input_folder together");
            missingParams();
            System.exit(0);
        }

    } else {
        missingParams();
        System.exit(0);
    }

    if (cmd.hasOption("krb_keytab") && cmd.hasOption("krb_upn")) {
        setKrb = true;
        keytab = cmd.getOptionValue("krb_keytab");
        keytabupn = cmd.getOptionValue("krb_upn");
        File keytabFile = new File(keytab);
        if (keytabFile.exists()) {
            if (!(keytabFile.canRead())) {
                System.out.println("KeyTab  exists but cannot read it - exiting");
                missingParams();
                System.exit(1);
            }
        } else {
            System.out.println("KeyTab doesn't exist  - exiting");
            missingParams();
            System.exit(1);
        }
    }
    hdfsClient = new HdfsClient(setKrb, keytabupn, keytab);
    hdfsClient.checkSecurity();

    if (cifsPwdCredPath != null && cifsPwdAlias != null) {
        cifsPwd = hdfsClient.getCredsViaJceks(cifsPwdCredPath, cifsPwdAlias);
    }

    if (hdfs2cifs) {
        cifsClient = new CifsClient(cifsLogonTo, cifsUserId, cifsPwd, cifsDomain, -1, false);
        List<String> hdfsfileList = null;
        try {
            hdfsfileList = hdfsClient.getHdfsFiles(hdfsInputFolder);
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (InterruptedException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }

        // Spins up a thread per directory to allow some parallelism..
        // Theoretically this can be run as a Mapreduce job
        ThreadGroup cifsTg = new ThreadGroup("CifsThreadGroup");

        for (int i = 0; i < hdfsfileList.size(); i++) {
            String fileName = hdfsfileList.get(i);
            HDFS2CifsThread sc = null;
            if (transferLimitTrue) {
                while (Integer.valueOf(transferLimit) == cifsTg.activeCount()) {
                    synchronized (objectWaiter) {
                        try {
                            objectWaiter.wait(10000L);
                        } catch (InterruptedException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            }
            String threadName = "cifs" + i;
            sc = new HDFS2CifsThread(cifsClient, cifsTg, threadName, fileName, cifsHost, cifsOutputFolder,
                    setKrb, keytabupn, keytab);

            sc.start();
        }
    }

    if (cifs2hdfs) {
        cifsClient = new CifsClient(cifsLogonTo, cifsUserId, cifsPwd, cifsDomain, Integer.valueOf(maxDepth),
                noNesting);

        SmbFile smbFileConn = cifsClient.createInitialConnection(cifsHost, cifsInputFolder);

        try {
            cifsClient.traverse(smbFileConn, Integer.valueOf(maxDepth), ignoreTopFolder, cifsInputFile);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        cifsFileList = cifsClient.getFileList();
        int cifsCount = cifsFileList.size();

        // Spins up a thread per directory to allow some parallelism..
        // Theoretically this can be run as a Mapreduce job
        ThreadGroup cifsTg = new ThreadGroup("CifsThreadGroup");

        for (int i = 0; i < cifsCount; i++) {
            String fileName = cifsFileList.get(i);
            Cifs2HDFSThread sc = null;
            if (transferLimitTrue) {
                while (Integer.valueOf(transferLimit) == cifsTg.activeCount()) {
                    synchronized (objectWaiter) {
                        try {
                            objectWaiter.wait(10000L);
                        } catch (InterruptedException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                }
            }
            try {
                String threadName = "cifs" + i;
                sc = new Cifs2HDFSThread(cifsTg, threadName, new SmbFile(fileName, cifsClient.auth),
                        hdfsOutputFolder, cifsHost, cifsInputFolder, setKrb, keytabupn, keytab);
            } catch (MalformedURLException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
            sc.start();
        }
    }
}

From source file:com.jkoolcloud.tnt4j.streams.sample.custom.SampleIntegration.java

/**
 * Configure streams and parsers, and run each stream in its own thread.
 *
 * @param cfgFileName
 *            configuration file name
 */
public static void loadConfigAndRun(String cfgFileName) {
    try {
        StreamsConfigLoader cfg = StringUtils.isEmpty(cfgFileName) ? new StreamsConfigLoader()
                : new StreamsConfigLoader(cfgFileName);
        Collection<TNTInputStream<?, ?>> streams = cfg.getStreams();
        if (streams == null || streams.isEmpty()) {
            throw new IllegalStateException("No Activity Streams found in configuration"); // NON-NLS
        }

        ThreadGroup streamThreads = new ThreadGroup("Streams"); // NON-NLS
        StreamThread ft;
        for (TNTInputStream<?, ?> stream : streams) {
            ft = new StreamThread(streamThreads, stream,
                    String.format("%s:%s", stream.getClass().getSimpleName(), stream.getName())); // NON-NLS
            ft.start();
        }
    } catch (Exception e) {
        LOGGER.log(OpLevel.ERROR, String.valueOf(e.getLocalizedMessage()), e);
    }
}