List of usage examples for java.util.concurrent.atomic.AtomicLong: the AtomicLong(long initialValue) constructor
public AtomicLong(long initialValue)
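Before the project examples below, here is a minimal, self-contained sketch of the constructor and the most common operations on the resulting counter. The method names (get, incrementAndGet, getAndAdd, set) are the standard java.util.concurrent.atomic API; only the surrounding demo class is made up.

import java.util.concurrent.atomic.AtomicLong;

public class AtomicLongConstructorDemo {
    public static void main(String[] args) {
        // seed the counter with an initial value
        AtomicLong counter = new AtomicLong(42L);

        long current = counter.get();            // 42
        long next = counter.incrementAndGet();   // 43, updated atomically
        long previous = counter.getAndAdd(10L);  // returns 43, counter is now 53
        counter.set(0L);                         // plain volatile write

        System.out.println(current + " " + next + " " + previous + " " + counter.get());
    }
}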
From source file:de.thorstenberger.examServer.dao.xml.TaskDefDaoImpl.java
/**
 *
 */
public TaskDefDaoImpl(final ExamServerManager examServerManager) {
    super("de.thorstenberger.examServer.dao.xml.jaxb", examServerManager.getSystemDir(), "taskdefs.xml");
    if (!existsWorkingFile()) {
        taskDefs = objectFactory.createTaskDefs();
        this.crntId = new AtomicLong(0);
        save(taskDefs);
        return;
    } else {
        taskDefs = (TaskDefs) load();
        this.crntId = new AtomicLong(findMostRecentId(taskDefs));
    }
}
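In this DAO the AtomicLong is seeded either with 0 (fresh working file) or with the highest persisted id, so later id allocation only needs to increment it. The allocation itself is not part of the snippet; a typical continuation might look like the hedged sketch below (the nextId() method is hypothetical, only the crntId field comes from the example).

// hypothetical id allocator built on the crntId field shown above
private long nextId() {
    // incrementAndGet is atomic, so concurrent callers never receive the same id
    return crntId.incrementAndGet();
}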
From source file:com.aol.advertising.qiao.management.metrics.StatisticsStore.java
@Override
public void init() throws Exception {
    statsManager.addStatsStore(id, this);
    // initialize predefined counters
    for (StatsEnum t : StatsEnum.values()) {
        stats.put(t.value(), new AtomicLong(0));
    }
}
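Here one counter per StatsEnum value is created up front and stored in a map, so later updates only need a lookup plus an atomic add. A hedged sketch of such an update path (the incrCounter method is made up; stats and StatsEnum come from the example):

// hypothetical update path for the pre-registered counters
public void incrCounter(StatsEnum key, long delta) {
    AtomicLong counter = stats.get(key.value());
    if (counter != null) {
        counter.addAndGet(delta); // atomic, no synchronized block needed
    }
}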
From source file:io.druid.server.initialization.JettyTest.java
@Test
@Ignore // this test will deadlock if it hits an issue, so ignored by default
public void testTimeouts() throws Exception {
    // test for request timeouts properly not locking up all threads
    final Executor executor = Executors.newFixedThreadPool(100);
    final AtomicLong count = new AtomicLong(0);
    final CountDownLatch latch = new CountDownLatch(1000);
    for (int i = 0; i < 10000; i++) {
        executor.execute(new Runnable() {
            @Override
            public void run() {
                executor.execute(new Runnable() {
                    @Override
                    public void run() {
                        long startTime = System.currentTimeMillis();
                        long startTime2 = 0;
                        try {
                            ListenableFuture<StatusResponseHolder> go = client.go(
                                    new Request(HttpMethod.GET, new URL("http://localhost:" + port + "/slow/hello")),
                                    new StatusResponseHandler(Charset.defaultCharset()));
                            startTime2 = System.currentTimeMillis();
                            go.get();
                        } catch (Exception e) {
                            e.printStackTrace();
                        } finally {
                            System.out.println("Response time client" + (System.currentTimeMillis() - startTime)
                                    + "time taken for getting future" + (System.currentTimeMillis() - startTime2)
                                    + "Counter " + count.incrementAndGet());
                            latch.countDown();
                        }
                    }
                });
            }
        });
    }
    latch.await();
}
From source file:eu.stratosphere.nephele.io.channels.DistributedChannelWithAccessInfo.java
DistributedChannelWithAccessInfo(final FileSystem fs, final Path checkpointFile, final int bufferSize,
        final boolean deleteOnClose) throws IOException {
    this.fs = fs;
    this.checkpointFile = checkpointFile;
    this.channel = new FileChannelWrapper(fs, checkpointFile, bufferSize, (short) 2);
    this.reservedWritePosition = new AtomicLong(0L);
    this.referenceCounter = new AtomicInteger(0);
    this.deleteOnClose = new AtomicBoolean(deleteOnClose);
}
From source file:com.openteach.diamond.network.waverider.WaveriderClient.java
@Override
public void initialize() {
    super.initialize();
    idGenerator = new AtomicLong(0);
    pendingRequestMap = new ConcurrentHashMap<String, PendingRequest>();
    slaveNode = WaveriderFactory.newInstance(config).buildSlave();
    slaveNode.addCommandHandler(COMMAND_DIAMOND_RESPONSE, new CommandHandler() {
        @Override
        public Command handle(Command command) {
            NetworkResponse response = NetworkResponse.unmarshall(command.getPayLoad());
            PendingRequest pr = pendingRequestMap.get(response.getId());
            if (null == pr) {
                logger.error(String.format(
                        "Server send response but there is no request for this response: % request id:%d",
                        response.getId()));
                return null;
            }
            pr.response = response;
            synchronized (pr.request) {
                pr.request.notifyAll();
            }
            return null;
        }
    });
    slaveNode.init();
    slaveNode.start();
}
From source file:core.Reconciler.java
public void run() {
    System.out.println("Reconciler: Started the reconciler thread");
    sfdcService = new SalesforceService(Configuration.getSalesForceConsumerSecret(),
            Configuration.getSalesForceConsumerKey(), Configuration.getSalesForceUsername(),
            Configuration.getSalesForcePassword(), Configuration.isSalesforceSandbox());
    SendGridClient.initV2(Configuration.getSendGridUsername(), Configuration.getSendGridPassword(),
            Configuration.getAlertEmailRecipient(), Configuration.getAlertEmailSender());
    NiprClient lClient = NiprClientConfiguration.getNiprClient(Configuration.getGetNiprAlertEndpoint(),
            Configuration.getNiprUsername(), Configuration.getNiprPassword());
    AtomicLong lRetryInterval = null;
    UUID lResyncTriggerId = LicenseDB.getResyncTriggerId();
    while (true) {
        if (Configuration.isPauseSync()) {
            System.out.println("System has been paused");
            try {
                Thread.sleep(36000000);
            } catch (Exception ex) {
            }
            continue;
        }
        try {
            lRetryInterval = new AtomicLong(Configuration.getReconcilerRetry());
            lResyncTriggerId = LicenseDB.getResyncTriggerId();
            System.out.println("Reconciler: Current triggered Resync ID " + lResyncTriggerId);
            // Get the latest copy. This is a Deep Copy
            Map<String, LicenseInternal> lUnprocessedLicenses = LicenseDB.getUnprocessedLicenses();
            Map<String, GregorianCalendar> lDaysToSync = LicenseDB.getPendingNiprSyncDates();
            Map<String, LicenseInternal> lLicenses = new HashMap<String, LicenseInternal>();
            Map<String, GregorianCalendar> lSuccessDates = new HashMap<String, GregorianCalendar>();
            DoNiprSync(lClient, lDaysToSync, lUnprocessedLicenses, lLicenses, lSuccessDates);
            System.out.println(
                    "Reconciler: " + lLicenses.size() + " new licenses to be processed in Sales Force ");
            if (lLicenses.size() > 0) {
                // Process information in sales force, save the remaining for next run
                lUnprocessedLicenses = ProcessInfoInSalesForce(lLicenses, lRetryInterval);
            }
            System.out.println(
                    "Reconciler: Total Failed licenses in in the system " + lUnprocessedLicenses.size());
            // This transfers reference, do not use the map after this call but get a fresh copy.
            // Update in the cache, which also serves the UI
            LicenseDB.setUnprocessedLicenses(lUnprocessedLicenses);
            LicenseDB.updateNiprSyncDates(lSuccessDates);
            UUID lLatestTriggerId = LicenseDB.getResyncTriggerId();
            if (lLatestTriggerId.compareTo(lResyncTriggerId) != 0) {
                System.out.println(
                        "Reconciler: Reconciler retrying with minimum sleep as resync triggered by user");
                Thread.sleep(MIN_SLEEP_INTERVAL);
                continue;
            }
            long lInterval = lRetryInterval.get();
            if (lUnprocessedLicenses.isEmpty()) {
                // Get the current time and set the interval till next day noon.
                Calendar cal = Calendar.getInstance();
                int lCurrentHour = cal.get(Calendar.HOUR_OF_DAY);
                // If currentHour is in the morning before 9am, we want to run at 12pm today, since nipr alerts is not generated yet
                // If currentHour is after 9am, we want to run next day noon which will be 12 hours + 24 - lCurrentHour
                if (lCurrentHour < 9) {
                    lInterval = (12 - lCurrentHour) * 60 * 60 * 1000;
                } else {
                    lInterval = (24 - lCurrentHour + 12) * 60 * 60 * 1000;
                }
            }
            System.out.println("Reconciler: Sleeping for " + lInterval + "ms");
            try {
                Thread.sleep(lInterval);
            } catch (InterruptedException lIntrEx) {
                System.out.println("Reconciler: interrupted");
            }
        } catch (Exception ex) {
            System.out.println("Reconciler mainloop threw an exception " + ex.getMessage());
        }
    }
}
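Note that lRetryInterval is not shared across threads here; the AtomicLong presumably acts as a mutable long holder so that ProcessInfoInSalesForce can adjust the retry interval and the caller observes the new value with get(). A reduced, hedged sketch of that pattern in isolation (the method and parameter names are hypothetical):

// "mutable long passed by reference" via AtomicLong (names are hypothetical)
static void maybeBackOff(AtomicLong retryInterval) {
    // the callee can change the value; the caller reads it afterwards with get()
    retryInterval.set(Math.min(retryInterval.get() * 2, 3_600_000L));
}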
From source file:org.apache.bookkeeper.mledger.util.RangeCache.java
/**
 * Construct a new RangeLruCache.
 *
 * @param weighter
 *            a custom weighter to compute the size of each stored value
 */
public RangeCache(Weighter<Value> weighter) {
    this.size = new AtomicLong(0);
    this.entries = new ConcurrentSkipListMap<>();
    this.weighter = weighter;
}
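The size field starts at zero and is presumably adjusted as entries are inserted and evicted, using the weighter to compute each entry's weight. A purely illustrative sketch of that bookkeeping follows; the put signature and the getSize method name are assumptions, not the actual RangeCache API.

// illustrative bookkeeping only; not the actual RangeCache implementation
public void put(Key key, Value value) {
    entries.put(key, value);
    size.addAndGet(weighter.getSize(value)); // keep the running total in sync (method name assumed)
}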
From source file:nl.knaw.huc.di.tag.model.graph.DotFactory.java
public String toDot(TAGDocument document, final String label) {
    layerColor.clear();
    StringBuilder dotBuilder = new StringBuilder("digraph TextGraph{\n")
            .append(" node [font=\"helvetica\";style=\"filled\";fillcolor=\"white\"]\n")
            .append(" d [shape=doublecircle;label=\"\"]\n")
            .append(" subgraph{\n");
    document.getTextNodeStream().map(this::toTextNodeLine).forEach(dotBuilder::append);
    dotBuilder.append(" rank=same\n");
    textGraph = document.getDTO().textGraph;
    AtomicLong prevNode = new AtomicLong(-1);
    textGraph.getTextNodeIdStream().forEach(id -> {
        if (prevNode.get() != -1) {
            dotBuilder.append(toNextEdgeLine(prevNode.get(), id));
        }
        prevNode.set(id);
    });
    dotBuilder.append(" }\n");
    document.getMarkupStream().map(this::toMarkupNodeLine).forEach(dotBuilder::append);
    document.getMarkupStream().map(this::toMarkupContinuationLine).forEach(dotBuilder::append);
    document.getMarkupStream().map(TAGMarkup::getDbId)
            .flatMap(id -> textGraph.getOutgoingEdges(id).stream()
                    .filter(LayerEdge.class::isInstance)
                    .map(LayerEdge.class::cast))
            .map(e -> toOutgoingEdgeLine(e, textGraph))
            .forEach(dotBuilder::append);
    textGraph.getOutgoingEdges(textGraph.documentNode).stream()
            .flatMap(e -> textGraph.getTargets(e).stream())
            .map(root -> " d->m" + root + " [arrowhead=none]\n")
            .forEach(dotBuilder::append);
    String graphLabel = escape(label);
    if (!graphLabel.isEmpty()) {
        dotBuilder.append(" label=<<font color=\"brown\" point-size=\"8\"><i>").append(graphLabel)
                .append("</i></font>>\n");
    }
    dotBuilder.append("}");
    return dotBuilder.toString();
}
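In this example the AtomicLong is not used for concurrency at all: prevNode is a mutable long holder that a lambda can read and update, because local variables captured by a lambda must be effectively final. A self-contained sketch of just that pattern (the stream contents are made up):

import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.LongStream;

public class PrevNodeHolderDemo {
    public static void main(String[] args) {
        // AtomicLong as a mutable holder inside a lambda (not for thread-safety here)
        AtomicLong prev = new AtomicLong(-1);
        LongStream.of(3, 7, 9).forEach(id -> {
            if (prev.get() != -1) {
                System.out.println(prev.get() + " -> " + id); // link each id to its predecessor
            }
            prev.set(id);
        });
    }
}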
From source file:com.mnt.base.stream.comm.PacketProcessorManager.java
protected PacketProcessorManager(int threadSize) {
    if (enablePacketCacheQueue) {
        int packetCacheQueueSize = BaseConfiguration.getIntProperty("packet_cache_queue_size", 10000);
        //streamPacketQueue = new LinkedBlockingQueue<StreamPacket>(packetCacheQueueSize);
        streamPacketQueueMap = new ConcurrentHashMap<Integer, BlockingQueue<StreamPacket>>();
        this.maxQueueMapSize = threadSize;
        this.pushAi = new AtomicLong(0);
        this.pollAi = new AtomicLong(0);
        for (int i = 0; i < this.maxQueueMapSize; i++) {
            streamPacketQueueMap.put(i, new LinkedBlockingQueue<StreamPacket>(packetCacheQueueSize));
        }
        threadSize = threadSize > 0 ? threadSize : 1; // at least one thread
        initProcessorThreads(threadSize);
    }
    initProcessorMap();
}
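pushAi and pollAi appear to be monotonically increasing cursors for spreading packets round-robin over the per-index queues. A hedged sketch of how the push side could pick a queue (the selectQueue method is made up; the fields come from the snippet above):

// hypothetical round-robin selection using the pushAi cursor
private BlockingQueue<StreamPacket> selectQueue() {
    int index = (int) (pushAi.getAndIncrement() % maxQueueMapSize);
    return streamPacketQueueMap.get(index);
}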
From source file:azkaban.flow.ImmutableFlowManager.java
public ImmutableFlowManager(Map<String, Flow> flowMap, Set<String> rootFlows,
        Map<String, List<String>> folderToRoot, FlowExecutionSerializer serializer,
        FlowExecutionDeserializer deserializer, File storageDirectory, long lastId) {
    this.folderToRoot = folderToRoot;
    this.flowsMap = flowMap;
    this.rootFlowNames = rootFlows;
    this.serializer = serializer;
    this.deserializer = deserializer;
    this.storageDirectory = storageDirectory;
    this.lastId = new AtomicLong(lastId);
    this.jsonToJava = new JSONToJava();
}