Example usage for java.util.concurrent PriorityBlockingQueue PriorityBlockingQueue()

Introduction

On this page you can find example usage for the java.util.concurrent PriorityBlockingQueue() constructor.

Prototype

public PriorityBlockingQueue() 

Document

Creates a PriorityBlockingQueue with the default initial capacity (11) that orders its elements according to their natural ordering (elements must implement Comparable).
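
Below is a minimal sketch of the default constructor in action (the class name and values are illustrative, not taken from the examples that follow). Elements must implement Comparable, and the queue is unbounded: the initial capacity of 11 only sizes the backing array.

import java.util.concurrent.PriorityBlockingQueue;

public class NaturalOrderingDemo {
    public static void main(String[] args) throws InterruptedException {
        // Default constructor: initial capacity 11, natural ordering, unbounded growth.
        PriorityBlockingQueue<Integer> queue = new PriorityBlockingQueue<>();
        queue.add(42);
        queue.add(7);
        queue.add(19);
        // take() always removes the smallest remaining element: prints 7, 19, 42.
        while (!queue.isEmpty()) {
            System.out.println(queue.take());
        }
    }
}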

Usage

From source file:Main.java

/**
 * Given any of the known collection types, this method will return an instance of the collection.
 * @param collectionType    the type of the collection
 * @return the collection instance
 */
public static Collection<?> getCollection(Class<?> collectionType) {
    if (HashSet.class.equals(collectionType)) {
        return new HashSet<Object>();
    } else if (TreeSet.class.equals(collectionType)) {
        return new TreeSet<Object>();
    } else if (CopyOnWriteArraySet.class.equals(collectionType)) {
        return new CopyOnWriteArraySet<Object>();
    } else if (LinkedHashSet.class.equals(collectionType)) {
        return new LinkedHashSet<Object>();
    } else if (ArrayList.class.equals(collectionType)) {
        return new ArrayList<Object>();
    } else if (LinkedList.class.equals(collectionType)) {
        return new LinkedList<Object>();
    } else if (Vector.class.equals(collectionType)) {
        return new Vector<Object>();
    } else if (Stack.class.equals(collectionType)) {
        return new Stack<Object>();
    } else if (PriorityQueue.class.equals(collectionType)) {
        return new PriorityQueue<Object>();
    } else if (PriorityBlockingQueue.class.equals(collectionType)) {
        return new PriorityBlockingQueue<Object>();
    } else if (ArrayDeque.class.equals(collectionType)) {
        return new ArrayDeque<Object>();
    } else if (ConcurrentLinkedQueue.class.equals(collectionType)) {
        return new ConcurrentLinkedQueue<Object>();
    } else if (LinkedBlockingQueue.class.equals(collectionType)) {
        return new LinkedBlockingQueue<Object>();
    } else if (LinkedBlockingDeque.class.equals(collectionType)) {
        return new LinkedBlockingDeque<Object>();
    } else if (List.class.equals(collectionType)) {
        return new LinkedList<Object>();
    } else if (Set.class.equals(collectionType)) {
        return new HashSet<Object>();
    } else if (Queue.class.equals(collectionType)) {
        return new PriorityQueue<Object>();
    } else if (Deque.class.equals(collectionType)) {
        return new ArrayDeque<Object>();
    } else if (Collection.class.equals(collectionType)) {
        return new LinkedList<Object>();
    }
    throw new IllegalArgumentException("Unsupported collection type: " + collectionType);
}
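
A quick usage sketch of the helper above, assuming it is compiled into Main.java as shown:

import java.util.Collection;
import java.util.List;
import java.util.concurrent.PriorityBlockingQueue;

public class GetCollectionDemo {
    public static void main(String[] args) {
        // Concrete type: an empty PriorityBlockingQueue is returned directly.
        Collection<?> queue = Main.getCollection(PriorityBlockingQueue.class);
        // Interface type: the helper falls back to a default implementation (LinkedList for List).
        Collection<?> list = Main.getCollection(List.class);
        System.out.println(queue.getClass().getSimpleName() + " / " + list.getClass().getSimpleName());
    }
}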

From source file:io.stallion.jobs.JobCoordinator.java

private JobCoordinator() {
    queue = new PriorityBlockingQueue<>();
    BasicThreadFactory factory = new BasicThreadFactory.Builder()
            .namingPattern("stallion-job-execution-thread-%d").build();
    // Create a fixed-size pool (25 threads) using the named thread factory
    pool = Executors.newFixedThreadPool(25, factory);
    registeredJobs = new HashSet<>();
}

From source file:org.apache.tez.dag.app.rm.LocalTaskSchedulerService.java

public LocalTaskSchedulerService(TaskSchedulerAppCallback appClient,
        ContainerSignatureMatcher containerSignatureMatcher, String appHostName, int appHostPort,
        String appTrackingUrl, AppContext appContext) {
    super(LocalTaskSchedulerService.class.getName());
    this.realAppClient = appClient;
    this.appCallbackExecutor = createAppCallbackExecutorService();
    this.containerSignatureMatcher = containerSignatureMatcher;
    this.appClientDelegate = createAppCallbackDelegate(appClient);
    this.appHostName = appHostName;
    this.appHostPort = appHostPort;
    this.appTrackingUrl = appTrackingUrl;
    this.appContext = appContext;
    taskRequestQueue = new PriorityBlockingQueue<TaskRequest>();
    taskAllocations = new LinkedHashMap<Object, Container>();
}

From source file:org.apache.hadoop.hbase.regionserver.CompactSplitThread.java

/** @param server */
CompactSplitThread(HRegionServer server) {
    super();
    this.server = server;
    this.conf = server.getConfiguration();
    this.regionSplitLimit = conf.getInt("hbase.regionserver.regionSplitLimit", Integer.MAX_VALUE);

    int largeThreads = Math.max(1, conf.getInt("hbase.regionserver.thread.compaction.large", 1));
    int smallThreads = conf.getInt("hbase.regionserver.thread.compaction.small", 1);

    int splitThreads = conf.getInt("hbase.regionserver.thread.split", 1);

    // if we have throttle threads, make sure the user also specified size
    Preconditions.checkArgument(largeThreads > 0 && smallThreads > 0);

    final String n = Thread.currentThread().getName();

    this.longCompactions = new ThreadPoolExecutor(largeThreads, largeThreads, 60, TimeUnit.SECONDS,
            new PriorityBlockingQueue<Runnable>(), new ThreadFactory() {
                @Override
                public Thread newThread(Runnable r) {
                    Thread t = new Thread(r);
                    t.setName(n + "-longCompactions-" + System.currentTimeMillis());
                    return t;
                }
            });
    this.longCompactions.setRejectedExecutionHandler(new Rejection());
    this.shortCompactions = new ThreadPoolExecutor(smallThreads, smallThreads, 60, TimeUnit.SECONDS,
            new PriorityBlockingQueue<Runnable>(), new ThreadFactory() {
                @Override
                public Thread newThread(Runnable r) {
                    Thread t = new Thread(r);
                    t.setName(n + "-shortCompactions-" + System.currentTimeMillis());
                    return t;
                }
            });
    this.shortCompactions.setRejectedExecutionHandler(new Rejection());
    this.splits = (ThreadPoolExecutor) Executors.newFixedThreadPool(splitThreads, new ThreadFactory() {
        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r);
            t.setName(n + "-splits-" + System.currentTimeMillis());
            return t;
        }
    });
    int mergeThreads = conf.getInt("hbase.regionserver.thread.merge", 1);
    this.mergePool = (ThreadPoolExecutor) Executors.newFixedThreadPool(mergeThreads, new ThreadFactory() {
        @Override
        public Thread newThread(Runnable r) {
            Thread t = new Thread(r);
            t.setName(n + "-merges-" + System.currentTimeMillis());
            return t;
        }
    });
}
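
Note the pattern here: ThreadPoolExecutor is handed a PriorityBlockingQueue<Runnable>, which only works because the compaction tasks HBase queues are mutually comparable; a task that does not implement Comparable would fail with ClassCastException once a second task is queued. A minimal sketch of the same pattern, with an illustrative PriorityTask type:

import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class PriorityPoolDemo {
    // Tasks queued in a PriorityBlockingQueue must be mutually comparable.
    static class PriorityTask implements Runnable, Comparable<PriorityTask> {
        final int priority;
        PriorityTask(int priority) { this.priority = priority; }
        @Override public void run() { System.out.println("running priority " + priority); }
        @Override public int compareTo(PriorityTask other) { return Integer.compare(priority, other.priority); }
    }

    public static void main(String[] args) throws InterruptedException {
        // A single worker, so queued tasks drain strictly in priority order.
        ThreadPoolExecutor pool = new ThreadPoolExecutor(1, 1, 60, TimeUnit.SECONDS,
                new PriorityBlockingQueue<Runnable>());
        for (int p : new int[] { 5, 1, 3 }) {
            pool.execute(new PriorityTask(p)); // execute(), not submit(): submit() wraps tasks in a non-comparable FutureTask
        }
        pool.shutdown();
        pool.awaitTermination(10, TimeUnit.SECONDS);
    }
}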

From source file:org.freewheelschedule.freewheel.config.ServerConfig.java

@Bean
public PriorityBlockingQueue<Trigger> repeatingQueue() {
    return new PriorityBlockingQueue<Trigger>();
}

From source file:org.freewheelschedule.freewheel.config.ServerConfig.java

@Bean
public PriorityBlockingQueue<Trigger> timedQueue() {
    return new PriorityBlockingQueue<Trigger>();
}
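
Because both beans share the type PriorityBlockingQueue<Trigger>, consumers must disambiguate by bean name. A hedged sketch of what an injection point might look like (the consumer class is illustrative, not from the source):

import java.util.concurrent.PriorityBlockingQueue;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

@Component
public class TriggerDispatcher {
    private final PriorityBlockingQueue<Trigger> timedQueue; // Trigger is the project's own type

    // @Qualifier selects the timedQueue bean over the identically typed repeatingQueue bean.
    public TriggerDispatcher(@Qualifier("timedQueue") PriorityBlockingQueue<Trigger> timedQueue) {
        this.timedQueue = timedQueue;
    }
}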

From source file:com.pari.nm.modules.jobs.PcbImportJob.java

@Override
public void execute(JobExecutionContext context) throws JobExecutionException {

    long t = System.currentTimeMillis();
    // give the client enough time to register for the job-status messages
    try {
        Thread.sleep(5000);
    } catch (Exception ee) {
    }
    logger = PariLoggerFactory.getLogger(Constants.NM_LOGGER);
    JobDetail job = context.getJobDetail();
    jobId = InventoryDBHelper.getJobId(job.getName(), job.getGroup());
    jobRunId = job.getJobDataMap().getInt("jobrunid");
    keys = (String[]) job.getJobDataMap().get("PCBSelectedDevices");
    int numDevs = (keys == null) ? 0 : keys.length;
    pcbFileName = (String) job.getJobDataMap().get("PCBFileName");
    grpName = (String) job.getJobDataMap().get("GroupName");
    login = (String) job.getJobDataMap().get("login");
    uId = ((Integer) job.getJobDataMap().get("userId")).intValue();
    customerId = ((Integer) job.getJobDataMap().get("CustomerID")).intValue();
    wingInstanceName = (String) job.getJobDataMap().get("WingInstanceName");
    custWingUniqueId = customerId + "_" + wingInstanceName;
    int row_id = -1;
    int version = -1;
    Key key = null;
    Map<String/* deviceIp */, PerDeviceImportStatus> perDeviceImportStatus = null;
    Map<String/* deviceIp */, PerDeviceConfigBackupStatus> perDeviceConfigBackupStatus = null;
    nccmJobId = ((Integer) job.getJobDataMap().get("nccmJobId")).intValue();
    nccmJobRunId = ((Integer) job.getJobDataMap().get("nccmJobRunId")).intValue();
    profileName = (String) job.getJobDataMap().get("profileName");

    // Fix for duplicate-device issue with parallel device import jobs for the same customer:
    // for a given customer, only one import job runs at a time, and later jobs wait until the
    // earlier import jobs are done.
    if (currentCustomerJobTokens.containsKey(custWingUniqueId)) {
        token = currentCustomerJobTokens.get(custWingUniqueId).incrementAndGet();
        logger.debug("The token for customer " + customerId + " is " + token);
        runnableTokens.get(custWingUniqueId).offer(token);
    } else {
        AtomicInteger retVal = currentCustomerJobTokens.putIfAbsent(custWingUniqueId, new AtomicInteger(0));
        // This can happen the first time, if two threads simultaneously try to put a value into currentCustomerJobTokens.
        // If another thread already put the counter, putIfAbsent returns that existing value and we reuse it.
        if (retVal != null) {
            token = currentCustomerJobTokens.get(custWingUniqueId).incrementAndGet();
            runnableTokens.get(custWingUniqueId).offer(token);
        } else {
            PriorityBlockingQueue<Integer> pbq = new PriorityBlockingQueue<Integer>();
            runnableTokens.put(custWingUniqueId, pbq);
            pbq.offer(token);
        }
        currentCustomerRunningToken.put(custWingUniqueId, -1);
        logger.debug("The token for customer " + customerId + " is " + token);
    }

    if (runnableTokens.get(custWingUniqueId) != null) {
        if (currentCustomerRunningToken.get(custWingUniqueId) == -1) {
            currentCustomerRunningToken.put(custWingUniqueId, runnableTokens.get(custWingUniqueId).peek());
        }

        if (runnableTokens.get(custWingUniqueId).peek() != currentCustomerRunningToken.get(custWingUniqueId)) {
            logMsg("Waiting for previous VSEM Import Jobs on same customer to finish.");

            while (runnableTokens.get(custWingUniqueId).peek() != currentCustomerRunningToken
                    .get(custWingUniqueId)) {
                try {
                    Thread.sleep(1 * 60 * 1000);
                } catch (Exception e) {
                    logger.error("Error while waiting for VSEM Jobs to finish...", e);
                }
            }
        }

        // removing the head of queue
        runnableTokens.get(custWingUniqueId).poll();
    }

    try {
        String zipFileName = job.getJobDataMap().getString("ZIPFILE");
        PCBFileIf pcbFile = null;
        if (customerId >= 0) {
            row_id = ServerDBHelper.insertPcbImportLog(customerId, System.currentTimeMillis(),
                    "NCCM Device file import is in progress.", wingInstanceName);
            try {
                processCustomerAndInstance(row_id);
            } catch (Exception ex) {
                logger.error("Exception while processing customer instance information", ex);
                return;
            }
        }
        if (zipFileName != null) {
            logger.debug("Processing zip file:" + zipFileName);
            PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 10, "Processing zip file.",
                    JobStatus.RUNNING);
            ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);

            ZIPImportListener zipListener = new ZIPImportListener();
            logger.debug("processing zip file : " + zipFileName + " Job: " + jobId + " Runid: " + jobRunId);
            ZIPProcessor zipProcessor = ZIPProcessor.open(new File(zipFileName), zipListener, customerId);
            try {
                while (zipListener.inProgress) {
                    Thread.sleep(3000);
                }
                String err = zipProcessor.getErrorMessage();
                if (err != null && !err.isEmpty()) {
                    context.setResult("Completed");
                    JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                    jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100, err, JobStatus.FAILED);
                    logMsg(err); // CSCua41383
                    // CSCua44590: Customer upload summary shows wrong msg when an invalid file is imported
                    ServerDBHelper.updatePcbImportLog(customerId, row_id, err);
                    ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                    return;
                }
                String[] deviceNames = zipProcessor.getDeviceNames();
                int numZipDevs = (deviceNames == null) ? 0 : deviceNames.length;
                ZIPImportFileResultMsg msg = new ZIPImportFileResultMsg(jobId, jobRunId, 10,
                        "ZIP Import successful", zipProcessor.getFileResultMap(), zipProcessor.getNumEntries(),
                        numZipDevs, zipProcessor.getErrorMessage(), zipProcessor.getWarningMessage());
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, msg);
                if (deviceNames == null || numZipDevs == 0) {
                    context.setResult("Completed");
                    JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                    // CSCua44590: Updated the Displaying Message.
                    ServerDBHelper.updatePcbImportLog(customerId, row_id,
                            "Unable to find any valid devices in the zip file.");
                    jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100,
                            "Unable to find any valid devices in the zip file.", JobStatus.FAILED);
                    logMsg("Unable to find any valid devices in the zip file."); // CSCua41383
                    ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                    return;
                }
                jobStatus = new PcbImportJobStatus(jobId, jobRunId, 10,
                        "Processed ZIP file. Found out " + numZipDevs + " devices from "
                                + zipProcessor.getNumEntries() + " files. Converting to VSEM file.",
                        JobStatus.RUNNING);
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);

                logger.debug("Zip processing done for file: " + zipFileName + " Job: " + jobId + " Runid: "
                        + jobRunId);
                List<HeuristicDescriptor> heus = HeuristicManager.getInstance()
                        .getHeuristics(PCBFile.DEVICE_FAMILY);
                File f = new File(zipFileName);
                File f1 = f.getParentFile();
                File f2 = new File(f1, "TEMP");
                f2.mkdir();
                File f3 = new File(f2, "PCB_" + System.nanoTime());
                f3.mkdir();
                pcbFileName = f3.getAbsolutePath();
                ZIP2PCBv2Converter converter = new ZIP2PCBv2Converter();
                converter.exportToVSEMFile(zipProcessor, f3, null, heus);
                logger.debug("Exported to VSEM file");
                jobStatus = new PcbImportJobStatus(jobId, jobRunId, 10, "Exported "
                        + zipProcessor.getDeviceNames().length + " devices to VSEM file. Starting import...",
                        JobStatus.RUNNING);
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                version = 2;
                f.delete(); // CSCtz18817
            } finally {
                zipProcessor.cleanup();
            }
        } else {
            key = getKey(customerId);
            logger.debug("Checking PCB Version for file: " + pcbFileName);
            try {
                version = getPCBFileVersion(pcbFileName, key);
            } catch (Exception ex) {
                // CSCtz03087:Job State not getting populated properly in Inventory job logs
                context.setResult("Completed");
                JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100,
                        "Not a valid PCB/VSEM File", JobStatus.FAILED);
                logMsg("Not a valid PCB/VSEM File"); // CSCua41383
                // CSCua44590: Customer upload summary shows wrong msg when an invalid file is imported
                ServerDBHelper.updatePcbImportLog(customerId, row_id, "Not a valid PCB/VSEM File");
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                return;
            }
            logger.debug("Found out PCB Version in pcb File: " + pcbFileName + " to be " + version);
        }
        PcbImportJobMsg pcbImMsg = null;
        if (version == 1) {
            pcbFile = processPcbV1(key);
            logger.debug("Job: " + jobId + " Number of thread for PCB import: " + noThreads);
            pcbImMsg = new PcbImportJobMsg(keys, pcbFile, customerId, grpName, wingInstanceName, this, row_id);
            numDevs = (keys == null) ? 0 : keys.length;
            Messenger.getInstance().publish(MessageTypes.TRIGGER_PCBIMPORT, pcbImMsg);
        } else {
            try {
                String login = "Unknown " + uId;
                UserDetails user = UsersFactory.getUser(uId);
                if (user != null) {
                    login = user.getLogin();
                }
                // Pass jobId as well, so that DeviceAdder (where the device list is prepared for custom
                // reports) can maintain a map of jobId to device list. This keeps parallel VSEM imports
                // working correctly.

                UserDetails historyUserObj = UsersFactory.getUser(InventoryDBHelper.getJobCreatorId(nccmJobId));
                String historyUser = login;

                if (historyUserObj != null)
                    historyUser = historyUserObj.getLogin();

                VSEMImporter vsemImporter = new VSEMImporter(pcbFileName, customerId, wingInstanceName, grpName,
                        this, row_id, login, jobId, new JobParameters(jobId, nccmJobId, historyUser));
                try {
                    vsemImporter.importVsem();
                } catch (Exception expr) {
                    context.setResult("Completed");
                    JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                    ServerDBHelper.updatePcbImportLog(customerId, row_id, expr.getMessage());
                    PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId,
                            job.getJobDataMap().getInt("jobrunid"), 10, "Validate Licenses.",
                            JobStatus.RUNNING);
                    jobStatus = new PcbImportJobStatus(jobId, job.getJobDataMap().getInt("jobrunid"), 100,
                            expr.getMessage(), JobStatus.FAILED);
                    logMsg("No sufficient license found." + expr.getMessage());
                    ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                    return;
                }
                numDevs = vsemImporter.getNumberOfDevices();
                perDeviceImportStatus = vsemImporter.getPerDeviceImportStatus();
                perDeviceConfigBackupStatus = vsemImporter.getPerDeviceConfigBackupStatus();
                /*XMLImporter xmlImporter = new XMLImporter(pcbFileName, customerId, wingInstanceName, grpName, this,
                    row_id, login, jobId);
                xmlImporter.importXml();
                numDevs = xmlImporter.getNumberOfDevices();
                perDeviceImportStatus = xmlImporter.getPerDeviceImportStatus();
                perDeviceConfigBackupStatus = xmlImporter.getPerDeviceConfigBackupStatus();*/
                noThreads = 0;
            } catch (Throwable ex) {
                logger.error("Exception while importing from VSEM File: " + pcbFile, ex);
            }
        }
        if (noThreads > numDevs) {
            noThreads = numDevs;
        }

        int size = numDevs;
        System.out.println("Number of devices = " + size);
        while (noThreads > 0) {
            try {
                Thread.sleep(2000);
                logger.debug("Job: " + jobId + ". " + noThreads + " more to go.");
            } catch (Exception ee) {
            }
        }
        DeviceImportEventHandler evt = new DeviceImportEventHandler();
        if (virtsAdded.size() > 0) {
            Iterator<VirtualDeviceRefresh> it = virtsAdded.iterator();
            while (it.hasNext()) {
                VirtualDeviceRefresh deviceAddedMsg = it.next();
                try {
                    DeviceManager.getInstance().processDevice(MessageTypes.VIRUAL_DEVICE_DISCOVERED,
                            deviceAddedMsg);
                    Messenger.getInstance().publish(MessageTypes.DEVICE_CONFIG_CHANGED,
                            deviceAddedMsg.getNodeId() + "");
                    try {
                        ServerAuditLog.getInstance().logAudit(login, ServerAuditConstants.DEVICE_MANAGEMENT,
                                ServerAuditConstants.DEVICE_MANAGEMENT_DEVICE_STATE,
                                "Virtual Device (" + deviceAddedMsg.getDeviceName() + ") added.", -1, -1);
                    } catch (Exception ee) {
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        }
        if (version == 1) {
            // look for voip phones
            String voipList = pcbFile.getAttributeValue("ExtendedAttributes", "VoipList");
            if (voipList != null) {
                populateVoIPPhones(voipList);
            }
        }

        context.setResult("Completed");
        if (state != JobStatus.FAILED) {
            state = JobStatus.SUCCESS;
        }
        JobRun.logJobCompletionStatus(jobId, jobRunId, state == JobStatus.SUCCESS);
        PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100, "Completed", state);
        logger.error("Finished executing job: " + jobId + " in " + ((System.currentTimeMillis() - t) / 1000)
                + " secs");
        ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
        ServerAuditLog.getInstance().logAudit(login, ServerAuditConstants.DEVICE_MANAGEMENT,
                ServerAuditConstants.DEVICE_MANAGEMENT_DISCOVERY, "Pcb/Zip Import Task Triggered.", jobId,
                jobRunId);
        ServerDBHelper.updatePcbImportLog(customerId, row_id,
                (state == 1) ? "NCCM Device File import partially successful."
                        : "Successfully imported the NCCM Device File.");
        setDeviceImportEventParameters(job, numDevs, row_id, evt);
        evt.setJobStatus((state == JobStatus.SUCCESS) ? "Success" : "Failed");
        evt.setPerDeviceImportStatus(perDeviceImportStatus);
        EventManager.getInstance().sendEvent(evt);

        if (perDeviceConfigBackupStatus != null && !perDeviceConfigBackupStatus.isEmpty()) {
            ConfigBackupEventHandler cfgbkpEvt = new ConfigBackupEventHandler();
            cfgbkpEvt.setJobId(jobId);
            cfgbkpEvt.setJobRunId(jobRunId);
            cfgbkpEvt.setJobName(job.getName());
            cfgbkpEvt.setCount(perDeviceConfigBackupStatus.size());
            cfgbkpEvt.setUserId(uId);
            cfgbkpEvt.setPerDeviceConfigBackupStatus(perDeviceConfigBackupStatus);
            cfgbkpEvt.setJobStatus((state == JobStatus.SUCCESS) ? "Success" : "Failed");
            EventManager.getInstance().sendEvent(cfgbkpEvt);
        }
    } catch (Exception ee) {
        ServerDBHelper.updatePcbImportLog(customerId, row_id, ee.getMessage());
        ee.printStackTrace();
        logger.error("Error while importing PCB File", ee);
        logMsg("Error while importing PCB File: " + ee.getMessage()); // CSCtx75737
        context.setResult("Completed"); // CSCtx75737 - Setting State of the Job.
        JobRun.logJobCompletionStatus(jobId, jobRunId, false);
        PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100, "Completed",
                JobStatus.FAILED);
        ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
        DeviceImportEventHandler evt = new DeviceImportEventHandler();
        setDeviceImportEventParameters(job, numDevs, row_id, evt);
        evt.setJobStatus("Failed");
        evt.setStatus(Constants.PCB_IMPORT_FAIL);
        EventManager.getInstance().sendEvent(evt);

        if (perDeviceConfigBackupStatus != null && !perDeviceConfigBackupStatus.isEmpty()) {
            ConfigBackupEventHandler cfgbkpEvt = new ConfigBackupEventHandler();
            cfgbkpEvt.setJobId(jobId);
            cfgbkpEvt.setJobRunId(jobRunId);
            cfgbkpEvt.setJobName(job.getName());
            cfgbkpEvt.setCount(perDeviceConfigBackupStatus.size());
            cfgbkpEvt.setUserId(uId);
            cfgbkpEvt.setPerDeviceConfigBackupStatus(perDeviceConfigBackupStatus);
            cfgbkpEvt.setJobStatus("Failed");
            EventManager.getInstance().sendEvent(cfgbkpEvt);
        }
    } finally {
        try {
            // Invoke custom report engine after vsem import job is complete - generates report for devices in vsem
            // CustomReportHandler handler = CustomReportHandler.getInstance();
            // handler.generateReport();
            CustomReportJobDetails jobDetails = new CustomReportJobDetails();
            jobDetails.setJobDesc("Job triggered due to device import");
            jobDetails.setVsemFileName(pcbFileName, customerId);
            jobDetails.setVsemImportJobId(jobId);
            List<ScriptInfo> list = ReportDefManagerImpl.getInstance().getBasicScriptInfoByType(ScriptType.Top,
                    customerId);

            // If a VSEM file is being imported for a customer, then all the scripts that belong to that
            // customer, as well as the scripts that belong to all customers, should be executed.
            if (customerId != ICEntity.ALL_CUSTOMER_ID) {
                list.addAll(ReportDefManagerImpl.getInstance().getBasicScriptInfoByType(ScriptType.Top,
                        ICEntity.ALL_CUSTOMER_ID));
            }

            // customerId);
            Set<String> devices = new HashSet<String>();
            StringBuilder devIds = new StringBuilder();
            // Get unique list of devices associated with all the scripts.
            for (ScriptInfo script : list) {
                Set<Device> devSet = NCCMCustomReportHandler.getInstance().getDeviceList(script, false, jobId);
                for (Device dev : devSet) {
                    devices.add(dev.getDeviceID());
                }
            }

            int i = 0;
            for (String device : devices) {
                devIds.append("D").append(device);
                if (i < (devices.size() - 1)) {
                    devIds.append(",");
                }
                i++;
            }

            if (list.size() > 0) {
                jobDetails.setDevices(devIds.toString());
                JobMgr.getInstance().scheduleRunNowCustomReportJob(uId, jobDetails);
            }

            if (pcbFileName != null) {
                pushToBackupServer();
            }
        } catch (Exception ee) {
            ee.printStackTrace();
        } finally {
            if (runnableTokens.get(custWingUniqueId) != null) {
                // Remove from currentCustomerJob if all threads have finished
                if (runnableTokens.get(custWingUniqueId).isEmpty()) {
                    currentCustomerJobTokens.remove(custWingUniqueId);
                } else {
                    // Promote the next waiting token to the running token for this customer.
                    currentCustomerRunningToken.put(custWingUniqueId,
                            runnableTokens.get(custWingUniqueId).peek());
                }
            }
        }
    }
}
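
The token bookkeeping in this job serializes parallel imports per customer: each job draws an increasing token, and because a PriorityBlockingQueue keeps its smallest element at the head, jobs proceed strictly in arrival order. A stripped-down sketch of the same gating idea (all names here are illustrative, not from the source):

import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.atomic.AtomicInteger;

public class TokenGate {
    private final AtomicInteger nextToken = new AtomicInteger(0);
    private final PriorityBlockingQueue<Integer> waiting = new PriorityBlockingQueue<>();

    public void runExclusively(Runnable job) throws InterruptedException {
        int token = nextToken.incrementAndGet();
        waiting.offer(token);
        // The smallest outstanding token sits at the head; poll-and-sleep until it is ours,
        // mirroring the waiting loop in the job above.
        while (waiting.peek() != token) {
            Thread.sleep(100);
        }
        try {
            job.run();
        } finally {
            waiting.poll(); // remove our token so the next-lowest waiter proceeds
        }
    }
}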

From source file:oculus.aperture.graph.aggregation.impl.ModularityAggregator.java

@Override
public void run() {

    logger.debug("Running kSnap clustering algorithm on " + nodeMap.size() + " nodes and " + linkMap.size()
            + " links...");

    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    HashMap<String, ModularityNode> linklookup = new HashMap<String, ModularityAggregator.ModularityNode>();

    for (Node n : nodeMap.values()) {
        ModularityNode mn = new ModularityNode(n);
        linklookup.put(n.getId(), mn);
        groups.add(mn);

    }
    links = new ArrayList<ModularityLink>();

    for (Link l : linkMap.values()) {
        if (linklookup.containsKey(l.getSourceId()) && linklookup.containsKey(l.getTargetId())) {
            //if this is not true we have links pointing to an invalid node...
            ModularityLink ml = new ModularityLink(linklookup.get(l.getSourceId()),
                    linklookup.get(l.getTargetId()));
            links.add(ml);

            ModularityNode start = linklookup.get(l.getSourceId());
            ModularityNode end = linklookup.get(l.getTargetId());
            start.addLink(ml);
            end.addLink(ml);
        }

    }

    boolean notterminate = true;

    int linksize;

    while (notterminate) {
        final List<Future<?>> futures = new ArrayList<Future<?>>();
        notterminate = false;
        final PriorityBlockingQueue<ModularityLink> linksort = new PriorityBlockingQueue<ModularityLink>();
        linksize = links.size();
        final int itrsize = linksize / nThreads;
        for (int i = 0; i < nThreads; i++) {

            final int passval = i;

            Future<?> foo = executor.submit(new Callable<Boolean>() {
                @Override
                public Boolean call() throws Exception {
                    boolean nt = false;
                    for (int lnknum = 0; lnknum < itrsize; lnknum++) {
                        ModularityLink ln = links.get(passval * itrsize + lnknum);
                        long nc = 0;
                        if (ln.source.neighbourcounts.containsKey(ln.target)) {
                            nc = ln.source.neighbourcounts.get(ln.target).intValue();
                        } else {
                            System.out.println("Oooops");
                        }

                        long q = nc - (ln.source.totalvolume * ln.target.totalvolume) / 2;

                        if (q > 0)
                            nt = true;
                        ln.q.set(q);
                        linksort.add(ln);
                    }
                    return nt;
                }
            });

            futures.add(foo);

        }

        for (Future<?> foo : futures) {
            try {
                notterminate = (Boolean) foo.get();
            } catch (InterruptedException interruptedCancellingAndSignalling) {
                Thread.currentThread().interrupt();
            } catch (ExecutionException wtf) {
                wtf.printStackTrace();
            }
        }

        if (!notterminate)
            break;
        //Now we take each link in the queue and add it to maximal matching 
        ConcurrentLinkedQueue<ModularityLink> maximalmatching = new ConcurrentLinkedQueue<ModularityAggregator.ModularityLink>();
        ConcurrentSkipListSet<ModularityNode> vertexcheck = new ConcurrentSkipListSet<ModularityAggregator.ModularityNode>();
        ModularityLink top = linksort.poll();
        maximalmatching.add(top);
        vertexcheck.add(top.source);
        vertexcheck.add(top.target);
        while (!linksort.isEmpty()) {
            ModularityLink nlnk = linksort.poll();
            if (nlnk.q.intValue() < 0)
                continue;

            if (vertexcheck.contains(nlnk.source) || vertexcheck.contains(nlnk.target))
                continue;
            maximalmatching.add(nlnk);
            vertexcheck.add(nlnk.source);
            vertexcheck.add(nlnk.target);
        }

        //Now we take all the pairs in maximal matching and fuse them
        for (ModularityLink ln : maximalmatching) {
            ModularityNode so = ln.source;
            ModularityNode tr = ln.target;
            so.assimilate(tr);
            groups.remove(tr);

            links.remove(ln);
        }
        linksize = links.size();
        if (linksize == 1)
            notterminate = false;
    }

    /*
    final List<Future<?>> futures = new ArrayList<Future<?>>();
            
    Future<?> foo = executor.submit(new Runnable(){
            
       @Override
       public void run() {
            
       }});
            
    futures.add(foo);
    */
    clusterSet = new ArrayList<Set<Node>>();

    for (ModularityNode g : groups) {

        if (cancel) {
            setStatusWaiting();
            return;
        }
        Set<Node> set = new HashSet<Node>();
        clusterSet.add(set);

        for (Node n : g.nodes) {

            if (cancel) {
                setStatusWaiting();
                return;
            }

            set.add(n);

        }

    }
    if (clusterer != null) {
        graphResult = clusterer.convertClusterSet(clusterSet);
    }
    stopWatch.stop();
    System.out.println("Finished Modularity clustering algorithm.");
    System.out.println("Algorithm took " + stopWatch.toString());//30 = 33.487
    stopWatch.reset();
    this.result = result;
}
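
Here the PriorityBlockingQueue acts as a thread-safe accumulator: worker threads score links concurrently and add them to linksort, and the coordinating thread then polls them back out in comparator order. A minimal sketch of that produce/drain pattern (the element type and scoring are illustrative):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;

public class ParallelScoreDemo {
    public static void main(String[] args) throws InterruptedException {
        PriorityBlockingQueue<Long> scores = new PriorityBlockingQueue<>();
        ExecutorService workers = Executors.newFixedThreadPool(4);
        // Several workers add scores concurrently; the queue needs no external locking.
        for (int i = 0; i < 4; i++) {
            workers.submit(() -> {
                for (int j = 0; j < 10; j++) {
                    scores.add(ThreadLocalRandom.current().nextLong(1000));
                }
            });
        }
        workers.shutdown();
        workers.awaitTermination(10, TimeUnit.SECONDS);
        // Drain in ascending natural order once all workers are done.
        for (Long s = scores.poll(); s != null; s = scores.poll()) {
            System.out.println(s);
        }
    }
}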

From source file:org.mitre.eren.model.ModelManager.java

public ModelManager(String[] args) {
    myQueue = new LinkedBlockingQueue<Element>();

    // parse the arguments
    int serverPort = 3737;
    String endpoint = "http://erenbus-gertner.mitre.org:3732/";

    try {
        Options options = new Options();

        options.addOption("p", "port", true,
                "Server port to listen on for incoming HTTP messages from the bus");
        options.addOption("u", "url", true, "Outbound URL to post HTTP messages to");
        options.addOption("l", "log", true, "Path to logfiles");
        options.addOption("s", "scale", true,
                "Population scale factor.  The populations from the scenario file are divided by this number");
        options.addOption("c", "class", true, "this is ignored");
        options.addOption("h", "help", false, "Prints this help message");

        CommandLineParser parser = new PosixParser();
        CommandLine cmd = parser.parse(options, args, true);

        if (cmd.hasOption("h")) {
            HelpFormatter hf = new HelpFormatter();
            hf.printHelp("ERENModel.jar", options);
            System.exit(1);
        }
        if (cmd.hasOption("p")) {
            serverPort = Integer.parseInt(cmd.getOptionValue("p"));
        }
        if (cmd.hasOption("u")) {
            endpoint = cmd.getOptionValue("u");
        }
        if (cmd.hasOption("s")) {
            scaleFactor = Integer.parseInt(cmd.getOptionValue("s"));
        }
        if (cmd.hasOption("l")) {
            String logfile = cmd.getOptionValue("l");
            FileHandler fh;
            try {
                fh = new FileHandler(logfile + "model%g.log", 10000000, 5);
                log.addHandler(fh);
            } catch (SecurityException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    } catch (ParseException e) {
        // TODO Auto-generated catch block
        log.log(Level.WARNING, "Caught Exception", e);
    }

    // Start the server
    messageProcessor = new InboundModelHttpController(this);
    //        InboundHttpEndpoint server = new InboundHttpEndpoint(serverPort, new InboundMessageHandler(messageProcessor));
    //        Thread st = new Thread(server, "EREN HTTP Inbound Endpoint");
    //        //st.setDaemon(true);
    //        st.start();

    // build the client
    this.delayedEventQueue = new PriorityBlockingQueue<DelayedEvent>();
    this.eventQueue = new LinkedBlockingQueue<Event>();
    Thread qt = new Thread("EREN Model Event Queue") {
        @Override
        public void run() {
            while (true) {
                try {
                    Event e = eventQueue.take();
                    e.getStateMachine().fireEvent(e.getEventName());
                } catch (InterruptedException e) {
                }
            }
        }
    };
    qt.start();

    //        client = new OutboundHttpEndpoint(endpoint);
    //        client.registerExtension(new EDXLRMExtensionFactory());
    //        client.registerExtension(new StartupExtensionFactory());
    resourceHolder = new ResourceHolder(client, sender);
}

From source file:codeswarm.code_swarm.java

/**
 * Initialisation
 */
public void setup() {
    width = cfg.getIntProperty(CodeSwarmConfig.WIDTH_KEY, 640);
    if (width <= 0) {
        width = 640;
    }

    height = cfg.getIntProperty(CodeSwarmConfig.HEIGHT_KEY, 480);
    if (height <= 0) {
        height = 480;
    }

    maxBackgroundThreads = cfg.getIntProperty(CodeSwarmConfig.MAX_THREADS_KEY, 4);
    if (maxBackgroundThreads <= 0) {
        maxBackgroundThreads = 4;
    }
    backgroundExecutor = new ThreadPoolExecutor(1, maxBackgroundThreads, Long.MAX_VALUE, TimeUnit.NANOSECONDS,
            new ArrayBlockingQueue<Runnable>(4 * maxBackgroundThreads),
            new ThreadPoolExecutor.CallerRunsPolicy());

    if (cfg.getBooleanProperty(CodeSwarmConfig.USE_OPEN_GL, false)) {
        size(width, height, OPENGL);
    } else {
        size(width, height);
    }

    showLegend = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_LEGEND, false);
    showHistogram = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_HISTORY, false);
    showDate = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_DATE, false);
    showEdges = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_EDGES, false);
    showDebug = cfg.getBooleanProperty(CodeSwarmConfig.SHOW_DEBUG, false);
    takeSnapshots = cfg.getBooleanProperty(CodeSwarmConfig.TAKE_SNAPSHOTS_KEY, false);
    drawNamesSharp = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_NAMES_SHARP, true);
    drawNamesHalos = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_NAMES_HALOS, false);
    drawFilesSharp = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_SHARP, false);
    drawFilesFuzzy = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_FUZZY, true);
    drawFilesJelly = cfg.getBooleanProperty(CodeSwarmConfig.DRAW_FILES_JELLY, false);
    background = cfg.getBackground().getRGB();

    UPDATE_DELTA = cfg.getIntProperty(CodeSwarmConfig.MSEC_PER_FRAME_KEY, -1);
    if (UPDATE_DELTA == -1) {
        int framesperday = cfg.getIntProperty(CodeSwarmConfig.FRAMES_PER_DAY_KEY, 4);
        if (framesperday > 0) {
            UPDATE_DELTA = (86400000 / framesperday);
        }
    }
    if (UPDATE_DELTA <= 0) {
        // Default to 4 frames per day.
        UPDATE_DELTA = 21600000;
    }

    isInputSorted = cfg.getBooleanProperty(CodeSwarmConfig.IS_INPUT_SORTED_KEY, false);

    /**
     * This section loads config files and calls the setup method for all physics engines.
     */

    physicsEngineConfigDir = cfg.getStringProperty(CodeSwarmConfig.PHYSICS_ENGINE_CONF_DIR, "physics_engine");
    File f = new File(physicsEngineConfigDir);
    String[] configFiles = null;
    if (f.exists() && f.isDirectory()) {
        configFiles = f.list();
    }
    for (int i = 0; configFiles != null && i < configFiles.length; i++) {
        if (configFiles[i].endsWith(".config")) {
            Properties p = new Properties();
            String ConfigPath = physicsEngineConfigDir + System.getProperty("file.separator") + configFiles[i];
            try {
                p.load(new FileInputStream(ConfigPath));
            } catch (FileNotFoundException e) {
                e.printStackTrace();
                System.exit(1);
            } catch (IOException e) {
                e.printStackTrace();
                System.exit(1);
            }
            String ClassName = p.getProperty("name", "__DEFAULT__");
            if (!ClassName.equals("__DEFAULT__")) {
                PhysicsEngine pe = getPhysicsEngine(ClassName);
                pe.setup(p);
                mPhysicsEngineChoices.add(pe);
            } else {
                logger.error("Skipping config file '" + ConfigPath
                        + "'.  Must specify class name via the 'name' parameter.");
                System.exit(1);
            }
        }
    }

    if (mPhysicsEngineChoices.size() == 0) {
        logger.error("No physics engine config files found in '" + physicsEngineConfigDir + "'.");
        System.exit(1);
    }

    // Physics engine configuration and instantiation
    physicsEngineSelection = cfg.getStringProperty(CodeSwarmConfig.PHYSICS_ENGINE_SELECTION,
            PHYSICS_ENGINE_LEGACY);

    for (PhysicsEngine p : mPhysicsEngineChoices) {
        if (physicsEngineSelection.equals(p.getClass().getName())) {
            mPhysicsEngine = p;
        }
    }

    if (mPhysicsEngine == null) {
        logger.error("No physics engine matches your choice of '" + physicsEngineSelection + "'. Check '"
                + physicsEngineConfigDir + "' for options.");
        System.exit(1);
    }

    smooth();
    frameRate(FRAME_RATE);

    // init data structures
    nodes = new CopyOnWriteArrayList<FileNode>();
    edges = new CopyOnWriteArrayList<Edge>();
    people = new CopyOnWriteArrayList<PersonNode>();
    history = new LinkedList<ColorBins>();

    if (isInputSorted) {
        //If the input is sorted, we only need to store the next few events
        eventsQueue = new ArrayBlockingQueue<FileEvent>(5000);
    } else {
        //Otherwise we need to store them all at once in a data structure that will sort them
        eventsQueue = new PriorityBlockingQueue<FileEvent>();
    }

    // Init color map
    initColors();

    loadRepEvents(cfg.getStringProperty(CodeSwarmConfig.INPUT_FILE_KEY)); // event formatted (this is the standard)
    synchronized (this) {
        while (!finishedLoading && eventsQueue.isEmpty()) {
            try {
                wait();
            } catch (InterruptedException e) {
                logger.error("The ready-check thread was interrupted", e);
            }
        }
    }
    prevDate = eventsQueue.peek().getDate();

    SCREENSHOT_FILE = cfg.getStringProperty(CodeSwarmConfig.SNAPSHOT_LOCATION_KEY);

    maxFramesSaved = (int) Math.pow(10, SCREENSHOT_FILE.replaceAll("[^#]", "").length());

    // Create fonts
    String fontName = cfg.getStringProperty(CodeSwarmConfig.FONT_KEY, "SansSerif");
    String fontNameBold = cfg.getStringProperty(CodeSwarmConfig.FONT_KEY_BOLD, "SansSerif");
    Integer fontSize = cfg.getIntProperty(CodeSwarmConfig.FONT_SIZE, 10);
    Integer fontSizeBold = cfg.getIntProperty(CodeSwarmConfig.FONT_SIZE_BOLD, 14);
    font = createFont(fontName, fontSize);
    boldFont = createFont(fontNameBold, fontSizeBold);

    textFont(font);

    // Create the file particle image
    sprite = loadImage(cfg.getStringProperty(CodeSwarmConfig.SPRITE_FILE_KEY, "particle.png"));
    // Add translucency (using itself in this case)
    sprite.mask(sprite);
}