Example usage for java.util.concurrent.atomic.AtomicInteger AtomicInteger(int)

List of usage examples for the java.util.concurrent.atomic.AtomicInteger AtomicInteger(int) constructor

Introduction

On this page you can find example usages of the java.util.concurrent.atomic.AtomicInteger AtomicInteger(int) constructor.

Prototype

public AtomicInteger(int initialValue) 

Document

Creates a new AtomicInteger with the given initial value.
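
As a quick orientation before the project examples under Usage, the sketch below shows the constructor in a typical shared-counter scenario; the class name, thread count and loop bounds are illustrative only and are not taken from any of the projects listed below.

import java.util.concurrent.atomic.AtomicInteger;

public class AtomicIntegerExample {
    public static void main(String[] args) throws InterruptedException {
        // Start the counter at 10 instead of the 0 used by the no-arg constructor.
        final AtomicInteger counter = new AtomicInteger(10);

        // Increment the counter from several threads; incrementAndGet() makes the
        // read-modify-write step atomic without explicit locking.
        Thread[] workers = new Thread[4];
        for (int i = 0; i < workers.length; i++) {
            workers[i] = new Thread(() -> {
                for (int j = 0; j < 1000; j++) {
                    counter.incrementAndGet();
                }
            });
            workers[i].start();
        }
        for (Thread worker : workers) {
            worker.join();
        }

        // 10 (initial value) + 4 threads * 1000 increments each = 4010
        System.out.println("Final value: " + counter.get());
    }
}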

Usage

From source file:com.yahoo.pulsar.broker.service.PersistentQueueE2ETest.java

@Test
public void testConsumersWithDifferentPermits() throws Exception {
    final String topicName = "persistent://prop/use/ns-abc/shared-topic4";
    final String subName = "sub4";
    final int numMsgs = 10000;

    final AtomicInteger msgCountConsumer1 = new AtomicInteger(0);
    final AtomicInteger msgCountConsumer2 = new AtomicInteger(0);
    final CountDownLatch latch = new CountDownLatch(numMsgs);

    int recvQ1 = 10;
    ConsumerConfiguration conf1 = new ConsumerConfiguration();
    conf1.setSubscriptionType(SubscriptionType.Shared);
    conf1.setReceiverQueueSize(recvQ1);
    conf1.setMessageListener((consumer, msg) -> {
        msgCountConsumer1.incrementAndGet();
        try {
            consumer.acknowledge(msg);
            latch.countDown();
        } catch (PulsarClientException e) {
            fail("Should not fail");
        }
    });

    int recvQ2 = 1;
    ConsumerConfiguration conf2 = new ConsumerConfiguration();
    conf2.setSubscriptionType(SubscriptionType.Shared);
    conf2.setReceiverQueueSize(recvQ2);
    conf2.setMessageListener((consumer, msg) -> {
        msgCountConsumer2.incrementAndGet();
        try {
            consumer.acknowledge(msg);
            latch.countDown();
        } catch (PulsarClientException e) {
            fail("Should not fail");
        }
    });

    Consumer consumer1 = pulsarClient.subscribe(topicName, subName, conf1);
    Consumer consumer2 = pulsarClient.subscribe(topicName, subName, conf2);

    List<CompletableFuture<MessageId>> futures = Lists.newArrayListWithCapacity(numMsgs);
    Producer producer = pulsarClient.createProducer(topicName);
    for (int i = 0; i < numMsgs; i++) {
        String message = "msg-" + i;
        futures.add(producer.sendAsync(message.getBytes()));
    }
    FutureUtil.waitForAll(futures).get();
    producer.close();

    latch.await(5, TimeUnit.SECONDS);

    assertEquals(msgCountConsumer1.get(), numMsgs - numMsgs / (recvQ1 + recvQ2), numMsgs * 0.1);
    assertEquals(msgCountConsumer2.get(), numMsgs / (recvQ1 + recvQ2), numMsgs * 0.1);

    consumer1.close();
    consumer2.close();
    admin.persistentTopics().delete(topicName);
}

From source file:com.vmware.admiral.adapter.docker.service.DockerNetworkAdapterService.java

private void processCreateNetwork(RequestContext context, int retriesCount) {
    AssertUtil.assertNotNull(context.networkState, "networkState");
    AssertUtil.assertNotEmpty(context.networkState.name, "networkState.name");

    CommandInput createCommandInput = context.commandInput
            .withPropertyIfNotNull(DOCKER_CONTAINER_NETWORK_NAME_PROP_NAME, context.networkState.name);
    if (context.networkState.driver != null && !context.networkState.driver.isEmpty()) {
        createCommandInput.withProperty(DOCKER_CONTAINER_NETWORK_DRIVER_PROP_NAME, context.networkState.driver);
    } else {
        createCommandInput.withProperty(DOCKER_CONTAINER_NETWORK_DRIVER_PROP_NAME, DOCKER_NETWORK_TYPE_DEFAULT);
    }

    if (context.networkState.options != null && !context.networkState.options.isEmpty()) {
        createCommandInput.withProperty(DOCKER_CONTAINER_NETWORK_OPTIONS_PROP_NAME,
                context.networkState.options);
    }

    if (context.networkState.ipam != null) {
        createCommandInput.withProperty(DOCKER_CONTAINER_NETWORK_IPAM_PROP_NAME,
                DockerAdapterUtils.ipamToMap(context.networkState.ipam));
    }

    context.executor.createNetwork(createCommandInput, (op, ex) -> {
        if (ex != null) {
            AtomicInteger retryCount = new AtomicInteger(retriesCount);
            if (RETRIABLE_HTTP_STATUSES.contains(op.getStatusCode())
                    && retryCount.getAndIncrement() < NETWORK_CREATE_RETRIES_COUNT) {
                // retry if failure is retriable
                logWarning("Create network %s failed with %s. Retries left %d", context.networkState.name,
                        Utils.toString(ex), NETWORK_CREATE_RETRIES_COUNT - retryCount.get());
                processCreateNetwork(context, retryCount.get());
            } else {
                fail(context.request, op, ex);
            }
        } else {
            @SuppressWarnings("unchecked")
            Map<String, Object> body = op.getBody(Map.class);

            context.networkState.id = (String) body.get(DOCKER_CONTAINER_NETWORK_ID_PROP_NAME);
            inspectAndUpdateNetwork(context);
            // transition to TaskStage.FINISHED is done later, after the network state gets
            // updated
        }
    });
}

From source file:org.mahasen.util.PutUtil.java

/**
 * Splits the given file, uploads the parts to the selected nodes and stores the resource in the DHT.
 *
 * @param file       the file to upload
 * @param resourceId the id under which the resource is stored in the DHT
 * @throws InterruptedException
 * @throws RegistryException
 * @throws PastException
 * @throws IOException
 * @throws MahasenConfigurationException
 * @throws MahasenException
 */
public void secureUpload(File file, Id resourceId) throws InterruptedException, RegistryException,
        PastException, IOException, MahasenConfigurationException, MahasenException {

    // get the IP addresses pool to upload files.
    Vector<String> nodeIpsToPut = getNodeIpsToPut();

    MahasenFileSplitter mahasenFileSplitter = new MahasenFileSplitter();
    mahasenFileSplitter.split(file);
    HashMap<String, String> fileParts = mahasenFileSplitter.getPartNames();

    mahasenResource.addPartNames(fileParts.keySet().toArray(new String[fileParts.size()]));
    Random random = new Random();

    for (String currentPartName : fileParts.keySet()) {
        File splittedFilePart = new File(fileParts.get(currentPartName));
        int randomNumber = random.nextInt(nodeIpsToPut.size());
        String nodeIp = nodeIpsToPut.get(randomNumber);

        try {
            setTrustStore();
            URI uri = null;

            ArrayList<NameValuePair> qparams = new ArrayList<NameValuePair>();
            qparams.add(new BasicNameValuePair("splittedfilename", splittedFilePart.getName()));
            uri = URIUtils.createURI("https", nodeIp + ":" + MahasenConstants.SERVER_PORT, -1,
                    "/mahasen/upload_request_ajaxprocessor.jsp", URLEncodedUtils.format(qparams, "UTF-8"),
                    null);

            MahasenUploadWorker uploadWorker = new MahasenUploadWorker(uri, currentPartName, splittedFilePart,
                    mahasenResource, nodeIp);
            uploadThread = new Thread(uploadWorker);
            uploadWorker.setJobId(jobId);

            //keep track of uploading parts
            AtomicInteger noOfParts = new AtomicInteger(0);
            storedNoOfParts.put(jobId, noOfParts);

            uploadThread.start();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    final BlockFlag blockFlag = new BlockFlag(true, 6000);
    while (true) {

        AtomicInteger noOfParts = storedNoOfParts.get(jobId);
        if (noOfParts.get() == fileParts.size()) {
            storedNoOfParts.remove(uploadThread.getId());
            System.out.println("uploaded no of parts " + noOfParts + "out of " + fileParts.size() + "going out "
                    + "#####Thread id:" + uploadThread.getId());
            blockFlag.unblock();
            break;
        }

        if (blockFlag.isBlocked()) {
            mahasenManager.getNode().getEnvironment().getTimeSource().sleep(10);
        } else {
            throw new MahasenException("Time out in uploading " + file.getName());
        }
    }

    mahasenManager.insertIntoDHT(resourceId, mahasenResource, false);
    mahasenManager.insertTreeMapIntoDHT(resourceId, mahasenResource, false);

    ReplicateRequestStarter replicateStarter = new ReplicateRequestStarter(mahasenResource);
    Thread replicateThread = new Thread(replicateStarter);
    replicateThread.start();
}

From source file:com.moscona.dataSpace.DataSpace.java

/**
 * This method is called by the data store after the object was loaded from disk and before it returns so that the
 * data space can recover to a functional state with all transients in a reasonable shape.
 * DO NOT CALL unless you're part of the implementation (Java has no friends)
 * @param dataStore
 * @param memoryManager
 */
public void initTransientsAfterRestore(IDataStore dataStore, IMemoryManager memoryManager)
        throws DataSpaceException {
    closeHelper = new CloseHelper();
    this.memoryManager = memoryManager;
    this.dataStore = dataStore;
    initNameSpaces(defaultPersistenceType);
    changesInProgress = new AtomicInteger(0);
    lastFlush = new AtomicLong(System.currentTimeMillis());
    // now we need to find all the vectors in the persistent data space and iterate over their segments and mark
    // them all as swapped out
    for (String name : persistentNameSpace.keySet()) {
        IDataElement element = persistentNameSpace.get(name);
        element.setNameSpace(persistentNameSpace);
        element.setPersistenceType(PersistenceType.PERSISTENT);
        if (AbstractVector.class.isAssignableFrom(element.getClass())) {
            AbstractVector vector = (AbstractVector) element;
            vector.initCloseHelper();
            vector.setDataSpace(this);
            vector.markAllSegmentsSwappedOut();
        }
    }
}

From source file:com.indeed.lsmtree.core.TestImmutableBTreeIndex.java

public void testSeekPrevious() throws Exception {
    final int[] ints = createTree();
    final ImmutableBTreeIndex.Reader<Integer, Long> reader = new ImmutableBTreeIndex.Reader(tmpDir,
            new IntSerializer(), new LongSerializer(), false);
    final int max = ints[ints.length - 1];
    final AtomicInteger done = new AtomicInteger(8);
    for (int i = 0; i < 8; i++) {
        final int index = i;
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    final Random r = new Random(index);
                    for (int i = 0; i < treeSize; i++) {
                        int rand = r.nextInt(max + 10);
                        int insertionindex = Arrays.binarySearch(ints, rand);
                        final Iterator<Generation.Entry<Integer, Long>> iterator = reader.reverseIterator(rand,
                                true);
                        final boolean hasPrevious = iterator.hasNext();
                        Generation.Entry<Integer, Long> entry = null;
                        assertEquals(
                                "rand: " + rand + " hasPrevious: " + hasPrevious
                                        + (hasPrevious ? " previous: " + (entry = iterator.next()) : ""),
                                hasPrevious, insertionindex != -1);
                        if (hasPrevious) {
                            if (entry == null)
                                entry = iterator.next();
                            assertTrue(entry.getKey() <= rand);
                            assertTrue(entry.getKey().longValue() == entry.getValue());
                        }
                        if (insertionindex >= 0) {
                            if (entry == null)
                                entry = iterator.next();
                            assertTrue(rand == ints[insertionindex]);
                            assertTrue(entry.getKey() == rand);
                            Generation.Entry<Integer, Long> result = reader.get(rand);
                            assertTrue(result.getValue() == rand);
                        } else {
                            if (hasPrevious) {
                                assertTrue(ints[(~insertionindex) - 1] < rand);
                                assertTrue(ints[(~insertionindex) - 1] == entry.getKey());
                            }
                            Generation.Entry<Integer, Long> result = reader.get(rand);
                            assertTrue(result == null);
                        }
                    }
                } finally {
                    done.decrementAndGet();
                }
            }
        }).start();
    }
    while (done.get() > 0) {
        Thread.yield();
    }
    reader.close();
}

From source file:com.alibaba.rocketmq.broker.BrokerStartup.java

public static BrokerController createBrokerController(String[] args) {
    System.setProperty(RemotingCommand.RemotingVersionKey, Integer.toString(MQVersion.CurrentVersion));

    // Socket send buffer size
    if (null == System.getProperty(NettySystemConfig.SystemPropertySocketSndbufSize)) {
        NettySystemConfig.SocketSndbufSize = 131072;
    }

    // Socket receive buffer size
    if (null == System.getProperty(NettySystemConfig.SystemPropertySocketRcvbufSize)) {
        NettySystemConfig.SocketRcvbufSize = 131072;
    }

    try {
        // Parse the command line
        Options options = ServerUtil.buildCommandlineOptions(new Options());
        commandLine = ServerUtil.parseCmdLine("mqbroker", args, buildCommandlineOptions(options),
                new PosixParser());
        if (null == commandLine) {
            System.exit(-1);
            return null;
        }

        // Broker, Netty server/client and message store configuration
        final BrokerConfig brokerConfig = new BrokerConfig();
        final NettyServerConfig nettyServerConfig = new NettyServerConfig();
        final NettyClientConfig nettyClientConfig = new NettyClientConfig();
        nettyServerConfig.setListenPort(10911);
        final MessageStoreConfig messageStoreConfig = new MessageStoreConfig();

        // For a slave, lower the maximum ratio of messages kept in memory
        if (BrokerRole.SLAVE == messageStoreConfig.getBrokerRole()) {
            int ratio = messageStoreConfig.getAccessMessageInMemoryMaxRatio() - 10;
            messageStoreConfig.setAccessMessageInMemoryMaxRatio(ratio);
        }

        // Print configuration properties and exit ('-p' / '-m' options)
        if (commandLine.hasOption('p')) {
            MixAll.printObjectProperties(null, brokerConfig);
            MixAll.printObjectProperties(null, nettyServerConfig);
            MixAll.printObjectProperties(null, nettyClientConfig);
            MixAll.printObjectProperties(null, messageStoreConfig);
            System.exit(0);
        } else if (commandLine.hasOption('m')) {
            MixAll.printObjectProperties(null, brokerConfig, true);
            MixAll.printObjectProperties(null, nettyServerConfig, true);
            MixAll.printObjectProperties(null, nettyClientConfig, true);
            MixAll.printObjectProperties(null, messageStoreConfig, true);
            System.exit(0);
        }

        // Load configuration from the file given with '-c'
        if (commandLine.hasOption('c')) {
            String file = commandLine.getOptionValue('c');
            if (file != null) {
                configFile = file;
                InputStream in = new BufferedInputStream(new FileInputStream(file));
                properties = new Properties();
                properties.load(in);
                MixAll.properties2Object(properties, brokerConfig);
                MixAll.properties2Object(properties, nettyServerConfig);
                MixAll.properties2Object(properties, nettyClientConfig);
                MixAll.properties2Object(properties, messageStoreConfig);

                BrokerPathConfigHelper.setBrokerConfigPath(file);

                System.out.println("load config properties file OK, " + file);
            }
        }

        MixAll.properties2Object(ServerUtil.commandLine2Properties(commandLine), brokerConfig);

        if (null == brokerConfig.getRocketmqHome()) {
            System.out.println("Please set the " + MixAll.ROCKETMQ_HOME_ENV
                    + " variable in your environment to match the location of the RocketMQ installation");
            System.exit(-2);
        }

        // Validate the Name Server address list (format "IP:PORT", multiple addresses separated by ';')
        String namesrvAddr = brokerConfig.getNamesrvAddr();
        if (null != namesrvAddr) {
            try {
                String[] addrArray = namesrvAddr.split(";");
                if (addrArray != null) {
                    for (String addr : addrArray) {
                        RemotingUtil.string2SocketAddress(addr);
                    }
                }
            } catch (Exception e) {
                System.out.printf(
                        "The Name Server Address[%s] illegal, please set it as follows, \"127.0.0.1:9876;192.168.0.1:9876\"\n",
                        namesrvAddr);
                System.exit(-3);
            }
        }

        // Set the broker id according to the broker role
        switch (messageStoreConfig.getBrokerRole()) {
        case ASYNC_MASTER:
        case SYNC_MASTER:
            // A master's broker id is always 0 (MixAll.MASTER_ID)
            brokerConfig.setBrokerId(MixAll.MASTER_ID);
            break;
        case SLAVE:
            if (brokerConfig.getBrokerId() <= 0) {
                System.out.println("Slave's brokerId must be > 0");
                System.exit(-3);
            }

            break;
        default:
            break;
        }

        // The HA port used between master and slave is the listen port + 1
        messageStoreConfig.setHaListenPort(nettyServerConfig.getListenPort() + 1);

        // Initialize Logback
        LoggerContext lc = (LoggerContext) LoggerFactory.getILoggerFactory();
        JoranConfigurator configurator = new JoranConfigurator();
        configurator.setContext(lc);
        lc.reset();
        configurator.doConfigure(brokerConfig.getRocketmqHome() + "/conf/logback_broker.xml");
        log = LoggerFactory.getLogger(LoggerName.BrokerLoggerName);

        // Log the effective configuration
        MixAll.printObjectProperties(log, brokerConfig);
        MixAll.printObjectProperties(log, nettyServerConfig);
        MixAll.printObjectProperties(log, nettyClientConfig);
        MixAll.printObjectProperties(log, messageStoreConfig);

        // Create the BrokerController
        final BrokerController controller = new BrokerController(//
                brokerConfig, //
                nettyServerConfig, //
                nettyClientConfig, //
                messageStoreConfig);
        boolean initResult = controller.initialize();
        if (!initResult) {
            controller.shutdown();
            System.exit(-3);
        }

        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            private volatile boolean hasShutdown = false;
            private AtomicInteger shutdownTimes = new AtomicInteger(0);

            @Override
            public void run() {
                synchronized (this) {
                    log.info("shutdown hook was invoked, " + this.shutdownTimes.incrementAndGet());
                    if (!this.hasShutdown) {
                        this.hasShutdown = true;
                        long begineTime = System.currentTimeMillis();
                        controller.shutdown();
                        long consumingTimeTotal = System.currentTimeMillis() - begineTime;
                        log.info("shutdown hook over, consuming time total(ms): " + consumingTimeTotal);
                    }
                }
            }
        }, "ShutdownHook"));

        return controller;
    } catch (Throwable e) {
        e.printStackTrace();
        System.exit(-1);
    }

    return null;
}

From source file:com.pari.nm.modules.jobs.PcbImportJob.java

@Override
public void execute(JobExecutionContext context) throws JobExecutionException {

    long t = System.currentTimeMillis();
    // to give enough time for client to register for the jobstatus messages
    try {
        Thread.sleep(5000);
    } catch (Exception ee) {
    }
    logger = PariLoggerFactory.getLogger(Constants.NM_LOGGER);
    JobDetail job = context.getJobDetail();
    jobId = InventoryDBHelper.getJobId(job.getName(), job.getGroup());
    jobRunId = job.getJobDataMap().getInt("jobrunid");
    keys = (String[]) job.getJobDataMap().get("PCBSelectedDevices");
    int numDevs = (keys == null) ? 0 : keys.length;
    pcbFileName = (String) job.getJobDataMap().get("PCBFileName");
    grpName = (String) job.getJobDataMap().get("GroupName");
    login = (String) job.getJobDataMap().get("login");
    uId = ((Integer) job.getJobDataMap().get("userId")).intValue();
    customerId = ((Integer) job.getJobDataMap().get("CustomerID")).intValue();
    wingInstanceName = (String) job.getJobDataMap().get("WingInstanceName");
    custWingUniqueId = customerId + "_" + wingInstanceName;
    int row_id = -1;
    int version = -1;
    Key key = null;
    Map<String/* deivceIp */, PerDeviceImportStatus> perDeviceImportStatus = null;
    Map<String/* deivceIp */, PerDeviceConfigBackupStatus> perDeviceConfigBackupStatus = null;
    nccmJobId = ((Integer) job.getJobDataMap().get("nccmJobId")).intValue();
    nccmJobRunId = ((Integer) job.getJobDataMap().get("nccmJobRunId")).intValue();
    profileName = (String) job.getJobDataMap().get("profileName");

    // Fix for Duplicate device issue with parallel device import jobs for same customer
    // For a customer, only 1 import job will run at a time and other jobs need to wait until previous
    // import jobs are done.
    if (currentCustomerJobTokens.containsKey(custWingUniqueId)) {
        token = currentCustomerJobTokens.get(custWingUniqueId).incrementAndGet();
        logger.debug("The token for customer " + customerId + " is " + token);
        runnableTokens.get(custWingUniqueId).offer(token);
    } else {
        AtomicInteger retVal = currentCustomerJobTokens.putIfAbsent(custWingUniqueId, new AtomicInteger(0));
        // This can happen the first time if two threads simultaneously try to put a value into currentCustomerJobTokens.
        // If another thread has already put a value, putIfAbsent returns that existing (non-null) value.
        if (retVal != null) {
            token = currentCustomerJobTokens.get(custWingUniqueId).incrementAndGet();
            runnableTokens.get(custWingUniqueId).offer(token);
        } else {
            PriorityBlockingQueue<Integer> pbq = new PriorityBlockingQueue<Integer>();
            runnableTokens.put(custWingUniqueId, pbq);
            pbq.offer(token);
        }
        currentCustomerRunningToken.put(custWingUniqueId, -1);
        logger.debug("The token for customer " + customerId + " is " + token);
    }

    if (runnableTokens.get(custWingUniqueId) != null) {
        if (currentCustomerRunningToken.get(custWingUniqueId) == -1) {
            currentCustomerRunningToken.put(custWingUniqueId, runnableTokens.get(custWingUniqueId).peek());
        }

        if (runnableTokens.get(custWingUniqueId).peek() != currentCustomerRunningToken.get(custWingUniqueId)) {
            logMsg("Waiting for previous VSEM Import Jobs on same customer to finish.");

            while (runnableTokens.get(custWingUniqueId).peek() != currentCustomerRunningToken
                    .get(custWingUniqueId)) {
                try {
                    Thread.sleep(1 * 60 * 1000);
                } catch (Exception e) {
                    logger.error("Error while waiting for VSEM Jobs to finish...", e);
                }
            }
        }

        // removing the head of queue
        runnableTokens.get(custWingUniqueId).poll();
    }

    try {
        String zipFileName = job.getJobDataMap().getString("ZIPFILE");
        PCBFileIf pcbFile = null;
        if (customerId >= 0) {
            row_id = ServerDBHelper.insertPcbImportLog(customerId, System.currentTimeMillis(),
                    "NCCM Device file import is in progress.", wingInstanceName);
            try {
                processCustomerAndInstance(row_id);
            } catch (Exception ex) {
                logger.error("Exception while processing customer instance information", ex);
                return;
            }
        }
        if (zipFileName != null) {
            logger.debug("Processing zip file:" + zipFileName);
            PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 10, "Processing zip file.",
                    JobStatus.RUNNING);
            ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);

            ZIPImportListener zipListener = new ZIPImportListener();
            logger.debug("processing zip file : " + zipFileName + " Job: " + jobId + " Runid: " + jobRunId);
            ZIPProcessor zipProcessor = ZIPProcessor.open(new File(zipFileName), zipListener, customerId);
            try {
                while (zipListener.inProgress) {
                    Thread.sleep(3000);
                }
                String err = zipProcessor.getErrorMessage();
                if (err != null && !err.isEmpty()) {
                    context.setResult("Completed");
                    JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                    jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100, err, JobStatus.FAILED);
                    logMsg(err); // CSCua41383
                    // CSCua44590: Customer upload summary shows wrong msg when an invalid file is imported
                    ServerDBHelper.updatePcbImportLog(customerId, row_id, err);
                    ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                    return;
                }
                String[] deviceNames = zipProcessor.getDeviceNames();
                int numZipDevs = (deviceNames == null) ? 0 : deviceNames.length;
                ZIPImportFileResultMsg msg = new ZIPImportFileResultMsg(jobId, jobRunId, 10,
                        "ZIP Import successful", zipProcessor.getFileResultMap(), zipProcessor.getNumEntries(),
                        numZipDevs, zipProcessor.getErrorMessage(), zipProcessor.getWarningMessage());
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, msg);
                if (deviceNames == null || numZipDevs == 0) {
                    context.setResult("Completed");
                    JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                    // CSCua44590: Updated the Displaying Message.
                    ServerDBHelper.updatePcbImportLog(customerId, row_id,
                            "Unable to find any valid devices in the zip file.");
                    jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100,
                            "Unable to find any valid devices in the zip file.", JobStatus.FAILED);
                    logMsg("Unable to find any valid devices in the zip file."); // CSCua41383
                    ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                    return;
                }
                jobStatus = new PcbImportJobStatus(jobId, jobRunId, 10,
                        "Processed ZIP file. Found out " + numZipDevs + " devices from "
                                + zipProcessor.getNumEntries() + " files. Converting to VSEM file.",
                        JobStatus.RUNNING);
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);

                logger.debug("Zip processing done for file: " + zipFileName + " Job: " + jobId + " Runid: "
                        + jobRunId);
                List<HeuristicDescriptor> heus = HeuristicManager.getInstance()
                        .getHeuristics(PCBFile.DEVICE_FAMILY);
                File f = new File(zipFileName);
                File f1 = f.getParentFile();
                File f2 = new File(f1, "TEMP");
                f2.mkdir();
                File f3 = new File(f2, "PCB_" + System.nanoTime());
                f3.mkdir();
                pcbFileName = f3.getAbsolutePath();
                ZIP2PCBv2Converter converter = new ZIP2PCBv2Converter();
                converter.exportToVSEMFile(zipProcessor, f3, null, heus);
                logger.debug("Exported to VSEM file");
                jobStatus = new PcbImportJobStatus(jobId, jobRunId, 10, "Exported "
                        + zipProcessor.getDeviceNames().length + " devices to VSEM file. Starting import...",
                        JobStatus.RUNNING);
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                version = 2;
                f.delete(); // CSCtz18817
            } finally {
                zipProcessor.cleanup();
            }
        } else {
            key = getKey(customerId);
            logger.debug("Checking PCB Version for file: " + pcbFileName);
            try {
                version = getPCBFileVersion(pcbFileName, key);
            } catch (Exception ex) {
                // CSCtz03087:Job State not getting populated properly in Inventory job logs
                context.setResult("Completed");
                JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100,
                        "Not a valid PCB/VSEM File", JobStatus.FAILED);
                logMsg("Not a valid PCB/VSEM File"); // CSCua41383
                // CSCua44590: Customer upload summary shows wrong msg when an invalid file is imported
                ServerDBHelper.updatePcbImportLog(customerId, row_id, "Not a valid PCB/VSEM File");
                ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                return;
            }
            logger.debug("Found out PCB Version in pcb File: " + pcbFileName + " to be " + version);
        }
        PcbImportJobMsg pcbImMsg = null;
        if (version == 1) {
            pcbFile = processPcbV1(key);
            logger.debug("Job: " + jobId + " Number of thread for PCB import: " + noThreads);
            pcbImMsg = new PcbImportJobMsg(keys, pcbFile, customerId, grpName, wingInstanceName, this, row_id);
            numDevs = (keys == null) ? 0 : keys.length;
            Messenger.getInstance().publish(MessageTypes.TRIGGER_PCBIMPORT, pcbImMsg);
        } else {
            try {
                String login = "Unknown " + uId;
                UserDetails user = UsersFactory.getUser(uId);
                if (user != null) {
                    login = user.getLogin();
                }
                // Pass jobId as well so that DeviceAdder, where the device list for custom reports is prepared,
                // can maintain a map of jobId to device list. This makes sure things work correctly in case of
                // parallel VSEM imports.

                UserDetails historyUserObj = UsersFactory.getUser(InventoryDBHelper.getJobCreatorId(nccmJobId));
                String historyUser = login;

                if (historyUserObj != null)
                    historyUser = historyUserObj.getLogin();

                VSEMImporter vsemImporter = new VSEMImporter(pcbFileName, customerId, wingInstanceName, grpName,
                        this, row_id, login, jobId, new JobParameters(jobId, nccmJobId, historyUser));
                try {
                    vsemImporter.importVsem();
                } catch (Exception expr) {
                    context.setResult("Completed");
                    JobRun.logJobCompletionStatus(jobId, jobRunId, false);
                    ServerDBHelper.updatePcbImportLog(customerId, row_id, expr.getMessage());
                    PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId,
                            job.getJobDataMap().getInt("jobrunid"), 10, "Validate Licenses.",
                            JobStatus.RUNNING);
                    jobStatus = new PcbImportJobStatus(jobId, job.getJobDataMap().getInt("jobrunid"), 100,
                            expr.getMessage(), JobStatus.FAILED);
                    logMsg("No sufficient license found." + expr.getMessage());
                    ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
                    return;
                }
                numDevs = vsemImporter.getNumberOfDevices();
                perDeviceImportStatus = vsemImporter.getPerDeviceImportStatus();
                perDeviceConfigBackupStatus = vsemImporter.getPerDeviceConfigBackupStatus();
                /*XMLImporter xmlImporter = new XMLImporter(pcbFileName, customerId, wingInstanceName, grpName, this,
                    row_id, login, jobId);
                xmlImporter.importXml();
                numDevs = xmlImporter.getNumberOfDevices();
                perDeviceImportStatus = xmlImporter.getPerDeviceImportStatus();
                perDeviceConfigBackupStatus = xmlImporter.getPerDeviceConfigBackupStatus();*/
                noThreads = 0;
            } catch (Throwable ex) {
                logger.error("Exception while importing from VSEM File: " + pcbFile, ex);
            }
        }
        if (noThreads > numDevs) {
            noThreads = numDevs;
        }

        int size = numDevs;
        System.out.println("Number of devices = " + size);
        while (noThreads > 0) {
            try {
                Thread.sleep(2000);
                logger.debug("Job: " + jobId + ". " + noThreads + " more to go.");
            } catch (Exception ee) {
            }
        }
        DeviceImportEventHandler evt = new DeviceImportEventHandler();
        if (virtsAdded.size() > 0) {
            Iterator<VirtualDeviceRefresh> it = virtsAdded.iterator();
            while (it.hasNext()) {
                VirtualDeviceRefresh deviceAddedMsg = it.next();
                try {
                    DeviceManager.getInstance().processDevice(MessageTypes.VIRUAL_DEVICE_DISCOVERED,
                            deviceAddedMsg);
                    Messenger.getInstance().publish(MessageTypes.DEVICE_CONFIG_CHANGED,
                            deviceAddedMsg.getNodeId() + "");
                    try {
                        ServerAuditLog.getInstance().logAudit(login, ServerAuditConstants.DEVICE_MANAGEMENT,
                                ServerAuditConstants.DEVICE_MANAGEMENT_DEVICE_STATE,
                                "Virtual Device (" + deviceAddedMsg.getDeviceName() + ") added.", -1, -1);
                    } catch (Exception ee) {
                    }
                } catch (Exception ex) {
                    ex.printStackTrace();
                }
            }
        }
        if (version == 1) {
            // look for voip phones
            String voipList = pcbFile.getAttributeValue("ExtendedAttributes", "VoipList");
            if (voipList != null) {
                populateVoIPPhones(voipList);
            }
        }

        context.setResult("Completed");
        if (state != JobStatus.FAILED) {
            state = JobStatus.SUCCESS;
        }
        JobRun.logJobCompletionStatus(jobId, jobRunId, (state == JobStatus.SUCCESS ? true : false));
        PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100, "Completed", state);
        logger.error("Finished executing job: " + jobId + " in " + ((System.currentTimeMillis() - t) / 1000)
                + " secs");
        ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
        ServerAuditLog.getInstance().logAudit(login, ServerAuditConstants.DEVICE_MANAGEMENT,
                ServerAuditConstants.DEVICE_MANAGEMENT_DISCOVERY, "Pcb/Zip Import Task Triggered.", jobId,
                jobRunId);
        ServerDBHelper.updatePcbImportLog(customerId, row_id,
                (state == 1) ? "NCCM Device File import partially successful."
                        : "Successfully imported the NCCM Device File.");
        setDeviceImportEventParameters(job, numDevs, row_id, evt);
        evt.setJobStatus((state == JobStatus.SUCCESS) ? "Success" : "Failed");
        evt.setPerDeviceImportStatus(perDeviceImportStatus);
        EventManager.getInstance().sendEvent(evt);

        if (perDeviceConfigBackupStatus != null && !perDeviceConfigBackupStatus.isEmpty()) {
            ConfigBackupEventHandler cfgbkpEvt = new ConfigBackupEventHandler();
            cfgbkpEvt.setJobId(jobId);
            cfgbkpEvt.setJobRunId(jobRunId);
            cfgbkpEvt.setJobName(job.getName());
            cfgbkpEvt.setCount(perDeviceConfigBackupStatus.size());
            cfgbkpEvt.setUserId(uId);
            cfgbkpEvt.setPerDeviceConfigBackupStatus(perDeviceConfigBackupStatus);
            cfgbkpEvt.setJobStatus((state == JobStatus.SUCCESS) ? "Success" : "Failed");
            EventManager.getInstance().sendEvent(cfgbkpEvt);
        }
    } catch (Exception ee) {
        ServerDBHelper.updatePcbImportLog(customerId, row_id, ee.getMessage());
        ee.printStackTrace();
        logger.error("Error while importing PCB File", ee);
        logMsg("Error while importing PCB File: " + ee.getMessage()); // CSCtx75737
        context.setResult("Completed"); // CSCtx75737 - Setting State of the Job.
        JobRun.logJobCompletionStatus(jobId, jobRunId, false);
        PcbImportJobStatus jobStatus = new PcbImportJobStatus(jobId, jobRunId, 100, "Completed",
                JobStatus.FAILED);
        ClientSessionManager.getInstance().sendJobStatusMessages(jobId, jobStatus);
        DeviceImportEventHandler evt = new DeviceImportEventHandler();
        setDeviceImportEventParameters(job, numDevs, row_id, evt);
        evt.setJobStatus("Failed");
        evt.setStatus(Constants.PCB_IMPORT_FAIL);
        EventManager.getInstance().sendEvent(evt);

        if (perDeviceConfigBackupStatus != null && !perDeviceConfigBackupStatus.isEmpty()) {
            ConfigBackupEventHandler cfgbkpEvt = new ConfigBackupEventHandler();
            cfgbkpEvt.setJobId(jobId);
            cfgbkpEvt.setJobRunId(jobRunId);
            cfgbkpEvt.setJobName(job.getName());
            cfgbkpEvt.setCount(perDeviceConfigBackupStatus.size());
            cfgbkpEvt.setUserId(uId);
            cfgbkpEvt.setPerDeviceConfigBackupStatus(perDeviceConfigBackupStatus);
            cfgbkpEvt.setJobStatus("Failed");
            EventManager.getInstance().sendEvent(cfgbkpEvt);
        }
    } finally {
        try {
            // Invoke custom report engine after vsem import job is complete - generates report for devices in vsem
            // CustomReportHandler handler = CustomReportHandler.getInstance();
            // handler.generateReport();
            CustomReportJobDetails jobDetails = new CustomReportJobDetails();
            jobDetails.setJobDesc("Job triggered due to device import");
            jobDetails.setVsemFileName(pcbFileName, customerId);
            jobDetails.setVsemImportJobId(jobId);
            List<ScriptInfo> list = ReportDefManagerImpl.getInstance().getBasicScriptInfoByType(ScriptType.Top,
                    customerId);

            // if a vsem file is being imported for a customer, then all the scripts which belong to him as well as
            // the scripts which belong to all customers should be executed.
            if (customerId != ICEntity.ALL_CUSTOMER_ID) {
                list.addAll(ReportDefManagerImpl.getInstance().getBasicScriptInfoByType(ScriptType.Top,
                        ICEntity.ALL_CUSTOMER_ID));
            }

            // customerId);
            Set<String> devices = new HashSet<String>();
            StringBuilder devIds = new StringBuilder();
            // Get unique list of devices associated with all the scripts.
            for (ScriptInfo script : list) {
                Set<Device> devSet = NCCMCustomReportHandler.getInstance().getDeviceList(script, false, jobId);
                for (Device dev : devSet) {
                    devices.add(dev.getDeviceID());
                }
            }

            int i = 0;
            for (String device : devices) {
                devIds.append("D").append(device);
                if (i < (devices.size() - 1)) {
                    devIds.append(",");
                }
                i++;
            }

            if (list.size() > 0) {
                jobDetails.setDevices(devIds.toString());
                JobMgr.getInstance().scheduleRunNowCustomReportJob(uId, jobDetails);
            }

            if (pcbFileName != null) {
                pushToBackupServer();
            }
        } catch (Exception ee) {
            ee.printStackTrace();
        } finally {
            if (runnableTokens.get(custWingUniqueId) != null) {
                // Remove from currentCustomerJob if all threads have finished
                if (runnableTokens.get(custWingUniqueId).isEmpty()) {
                    currentCustomerJobTokens.remove(custWingUniqueId);
                } else {
                    // Getting and setting the latest token to the current customer.
                    currentCustomerRunningToken.put(custWingUniqueId,
                            runnableTokens.get(custWingUniqueId).peek());
                }
            }
        }
    }
}

From source file:com.vmware.identity.idm.server.provider.ldap.LdapProvider.java

public LdapProvider(IIdentityStoreData store, Collection<X509Certificate> tenantTrustedCertificates) {
    super(store, tenantTrustedCertificates);
    Validate.isTrue(this.getStoreDataEx().getProviderType() == IdentityStoreType.IDENTITY_STORE_TYPE_LDAP,
            "IIdentityStoreData must represent a store of 'IDENTITY_STORE_TYPE_LDAP' type.");

    _specialAttributes = new HashSet<String>();
    _specialAttributes.add(SPECIAL_ATTR_SUBJECT_TYPE.toLowerCase());
    _specialAttributes.add(SPECIAL_ATTR_USER_PRINCIPAL_NAME.toLowerCase());
    _specialAttributes.add(SPECIAL_ATTR_MEMBER_OF.toLowerCase());
    _ldapSchemaMapping = new OpenLdapSchemaMapping(this.getStoreDataEx().getIdentityStoreSchemaMapping());
    _pagedResultSupportedFlag = new AtomicInteger(PAGED_RESULT_SUPPORTED_UNKNOWN);

    USER_GROUP_MEMBERS_LIST_LINK_ATTRIBUTE = _ldapSchemaMapping
            .getUserAttribute(IdentityStoreAttributeMapping.AttributeIds.UserAttributeGroupMembersListLink);
    GROUP_GROUP_MEMBERS_LIST_LINK_ATTRIBUTE = _ldapSchemaMapping
            .getGroupAttribute(IdentityStoreAttributeMapping.AttributeIds.GroupAttributeGroupMembersListLink);
    _userGroupMembersListLinkExists = _ldapSchemaMapping.doesLinkExist(USER_GROUP_MEMBERS_LIST_LINK_ATTRIBUTE);
    _userGroupMembersListLinkIsDn = _ldapSchemaMapping.isDnAttribute(USER_GROUP_MEMBERS_LIST_LINK_ATTRIBUTE);
    _groupGroupMembersListLinkExists = _ldapSchemaMapping
            .doesLinkExist(GROUP_GROUP_MEMBERS_LIST_LINK_ATTRIBUTE);
    _groupGroupMembersListLinkIsDn = _ldapSchemaMapping.isDnAttribute(GROUP_GROUP_MEMBERS_LIST_LINK_ATTRIBUTE);
}

From source file:org.dasein.cloud.azure.tests.network.AzureIpAddressSupportTest.java

@Test
public void stopForwardToServerShouldPostCorrectRequest() throws CloudException, InternalException {
    final AtomicInteger putCount = new AtomicInteger(0);
    new MockUp<CloseableHttpClient>() {
        @Mock(invocations = 2)
        public CloseableHttpResponse execute(Invocation inv, HttpUriRequest request) throws IOException {
            if (request.getMethod().equals("GET")) {
                DaseinObjectToXmlEntity<PersistentVMRoleModel> daseinEntity = new DaseinObjectToXmlEntity<PersistentVMRoleModel>(
                        createPersistentVMRoleModelWithEndpoint());
                assertGet(request, EXPECTED_URL,
                        new Header[] { new BasicHeader("x-ms-version", "2012-03-01") });
                return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_OK), daseinEntity,
                        new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) });
            } else if (request.getMethod().equals("PUT")) {
                putCount.incrementAndGet();
                PersistentVMRoleModel persistentVMRoleModel = createPersistentVMRoleModelWithoutEndpoint();
                // set an empty list, otherwise the unitils assertion fails because one side is null while the other is an empty list
                persistentVMRoleModel.getConfigurationSets().get(0)
                        .setInputEndpoints(new ArrayList<PersistentVMRoleModel.InputEndpoint>());
                assertPut(request, EXPECTED_URL, new Header[] { new BasicHeader("x-ms-version", "2012-03-01") },
                        persistentVMRoleModel);
                return getHttpResponseMock(getStatusLineMock(HttpServletResponse.SC_ACCEPTED), null,
                        new Header[] { new BasicHeader("x-ms-request-id", UUID.randomUUID().toString()) });
            } else {
                throw new IOException("Request is not mocked");
            }
        }
    };
    String ruleId = new AzureRuleIdParts(VM_ID, Protocol.TCP.toString(), String.valueOf(PRIVATE_PORT))
            .toProviderId();
    ipAddressSupport.stopForwardToServer(ruleId, VM_ID);
    assertEquals("PUT count doesn't match", 1, putCount.get());
}

From source file:org.fcrepo.client.ConnectionManagementTest.java

/**
 * Demonstrates that HTTP connections are released when the user of the FcrepoClient closes the HTTP entity body.
 * Each method of the FcrepoClient (get, put, post, etc.) is tested.
 */
@Test
public void connectionReleasedOnEntityBodyClose() {
    final int expectedCount = (int) Stream.of(HttpMethods.values()).filter(m -> m.entity).count();
    final AtomicInteger actualCount = new AtomicInteger(0);
    final MockHttpExpectations.Uris uri = uris.uri200;

    Stream.of(HttpMethods.values()).filter(method -> method.entity).forEach(method -> {
        connect(client, uri, method, FcrepoResponseHandler.closeEntityBody);
        actualCount.getAndIncrement();
    });

    assertEquals("Expected to make " + expectedCount + " connections; made " + actualCount.get(), expectedCount,
            actualCount.get());
    verifyConnectionRequestedAndClosed(actualCount.get(), connectionManager);
}