Usage examples for java.util.concurrent.ConcurrentLinkedQueue#poll()
Signature: public E poll()
From source file:org.geowebcache.storage.jdbc.jobstore.JDBCJobWrapper.java
/** * Goes through recently added logs for this job and persists them * Clears recent logs from the list of recent logs. * Uses a ConcurrentLinkedQueue and is threadsafe. * @param stObj/*from ww w .j av a 2 s.co m*/ * @param conn * @throws SQLException * @throws StorageException */ private void putRecentJobLogs(JobObject stObj, Connection conn) throws StorageException, SQLException { ConcurrentLinkedQueue<JobLogObject> logs = stObj.getNewLogs(); while (!logs.isEmpty()) { JobLogObject joblog; synchronized (logs) { joblog = logs.poll(); } // Make sure the joblog points to this job. Sometimes a job might have logs before first // being saved so the logs won't be pointing to the right ID yet. joblog.setJobId(stObj.getJobId()); putJobLog(joblog); } }
From source file:edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.RDFServiceJena.java
/**
 * Orders statements by a subject-chained traversal: starting from the first
 * URI-resource subject, emits all statements with that subject, then treats
 * each resource-valued object as the next subject to visit. Statements that
 * share a subject end up grouped together in the output.
 *
 * @param stmts the statements to order (returned unchanged if there is only
 *              one, or if no statement has a named subject)
 * @return a reordered list containing exactly the input statements
 * @throws RuntimeException if the reordered list does not match the input size
 *                          (internal consistency check)
 */
private List<Statement> sort(List<Statement> stmts) {
    List<Statement> output = new ArrayList<Statement>();
    int originalSize = stmts.size();
    if (originalSize == 1) {
        return stmts;
    }
    List<Statement> remaining = stmts;
    // NOTE(review): this queue is built and drained on a single thread; a
    // plain ArrayDeque would do -- ConcurrentLinkedQueue adds no value here.
    ConcurrentLinkedQueue<Resource> subjQueue = new ConcurrentLinkedQueue<Resource>();
    // Seed the traversal with the first URI-resource subject found.
    for (Statement stmt : remaining) {
        if (stmt.getSubject().isURIResource()) {
            subjQueue.add(stmt.getSubject());
            break;
        }
    }
    if (subjQueue.isEmpty()) {
        log.warn("No named subject in statement patterns");
        return stmts;
    }
    while (remaining.size() > 0) {
        // If the traversal stalled (statements disconnected from everything
        // visited so far), restart from the next remaining subject so every
        // statement is eventually emitted.
        if (subjQueue.isEmpty()) {
            subjQueue.add(remaining.get(0).getSubject());
        }
        while (!subjQueue.isEmpty()) {
            Resource subj = subjQueue.poll();
            List<Statement> temp = new ArrayList<Statement>();
            for (Statement stmt : remaining) {
                if (stmt.getSubject().equals(subj)) {
                    output.add(stmt);
                    // Resource-valued objects become subjects to visit next.
                    if (stmt.getObject().isResource()) {
                        subjQueue.add((Resource) stmt.getObject());
                    }
                } else {
                    temp.add(stmt);
                }
            }
            remaining = temp;
        }
    }
    // Sanity check: the reordering must neither drop nor duplicate statements.
    if (output.size() != originalSize) {
        throw new RuntimeException(
                "original list size was " + originalSize + " but sorted size is " + output.size());
    }
    return output;
}
From source file:org.apache.hadoop.yarn.server.nodemanager.NodeStatusUpdaterImpl.java
/**
 * Drains any newly queued log-aggregation status reports into the accumulating
 * temp list, then returns a snapshot copy of that list.
 *
 * @param lastestLogAggregationStatus queue of reports produced since the last
 *                                    heartbeat; emptied by this call
 * @return a fresh list containing everything accumulated so far
 */
private List<LogAggregationReport> getLogAggregationReportsForApps(
        ConcurrentLinkedQueue<LogAggregationReport> lastestLogAggregationStatus) {
    // Move every pending report out of the concurrent queue.
    for (LogAggregationReport report = lastestLogAggregationStatus.poll();
            report != null;
            report = lastestLogAggregationStatus.poll()) {
        this.logAggregationReportForAppsTempList.add(report);
    }
    // Hand back a defensive copy so callers cannot mutate the temp list.
    return new ArrayList<LogAggregationReport>(this.logAggregationReportForAppsTempList);
}
From source file:org.languagetool.rules.spelling.suggestions.SuggestionChangesTest.java
public void testChanges() throws IOException, InterruptedException { File configFile = new File(System.getProperty("config", "SuggestionChangesTestConfig.json")); ObjectMapper mapper = new ObjectMapper(new JsonFactory().enable(JsonParser.Feature.ALLOW_COMMENTS)); SuggestionChangesTestConfig config = mapper.readValue(configFile, SuggestionChangesTestConfig.class); SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd_HH:mm:ss"); String timestamp = dateFormat.format(new Date()); Path loggingFile = Paths.get(config.logDir, String.format("suggestionChangesExperiment_%s.log", timestamp)); Path datasetFile = Paths.get(config.logDir, String.format("suggestionChangesExperiment_%s.csv", timestamp)); BufferedWriter writer = Files.newBufferedWriter(loggingFile); CSVPrinter datasetWriter = new CSVPrinter(Files.newBufferedWriter(datasetFile), CSVFormat.DEFAULT.withEscape('\\')); List<String> datasetHeader = new ArrayList<>( Arrays.asList("sentence", "correction", "covered", "replacement", "dataset_id")); SuggestionsChanges.init(config, writer); writer.write("Evaluation configuration: \n"); String configContent = String.join("\n", Files.readAllLines(configFile.toPath())); writer.write(configContent);// w ww. 
j a va 2s .c o m writer.write("\nRunning experiments: \n"); int experimentId = 0; for (SuggestionChangesExperiment experiment : SuggestionsChanges.getInstance().getExperiments()) { experimentId++; writer.write(String.format("#%d: %s%n", experimentId, experiment)); datasetHeader.add(String.format("experiment_%d_suggestions", experimentId)); datasetHeader.add(String.format("experiment_%d_metadata", experimentId)); datasetHeader.add(String.format("experiment_%d_suggestions_metadata", experimentId)); } writer.newLine(); datasetWriter.printRecord(datasetHeader); BlockingQueue<SuggestionTestData> tasks = new LinkedBlockingQueue<>(1000); ConcurrentLinkedQueue<Pair<SuggestionTestResultData, String>> results = new ConcurrentLinkedQueue<>(); List<SuggestionTestThread> threads = new ArrayList<>(); for (int i = 0; i < Runtime.getRuntime().availableProcessors(); i++) { SuggestionTestThread worker = new SuggestionTestThread(tasks, results); worker.start(); threads.add(worker); } // Thread for writing results from worker threads into CSV Thread logger = new Thread(() -> { try { long messages = 0; //noinspection InfiniteLoopStatement while (true) { Pair<SuggestionTestResultData, String> message = results.poll(); if (message != null) { writer.write(message.getRight()); SuggestionTestResultData result = message.getLeft(); int datasetId = 1 + config.datasets.indexOf(result.getInput().getDataset()); if (result != null && result.getSuggestions() != null && !result.getSuggestions().isEmpty() && result.getSuggestions().stream() .noneMatch(m -> m.getSuggestedReplacements() == null || m.getSuggestedReplacements().isEmpty())) { List<Object> record = new ArrayList<>(Arrays.asList(result.getInput().getSentence(), result.getInput().getCorrection(), result.getInput().getCovered(), result.getInput().getReplacement(), datasetId)); for (RuleMatch match : result.getSuggestions()) { List<String> suggestions = match.getSuggestedReplacements(); record.add(mapper.writeValueAsString(suggestions)); // 
features extracted by SuggestionsOrdererFeatureExtractor record.add(mapper.writeValueAsString(match.getFeatures())); List<SortedMap<String, Float>> suggestionsMetadata = new ArrayList<>(); for (SuggestedReplacement replacement : match.getSuggestedReplacementObjects()) { suggestionsMetadata.add(replacement.getFeatures()); } record.add(mapper.writeValueAsString(suggestionsMetadata)); } datasetWriter.printRecord(record); } if (++messages % 1000 == 0) { writer.flush(); System.out.printf("Evaluated %d corrections.%n", messages); } } } } catch (IOException e) { throw new RuntimeException(e); } }); logger.setDaemon(true); logger.start(); // format straight from database dump String[] header = { "id", "sentence", "correction", "language", "rule_id", "suggestion_pos", "accept_language", "country", "region", "created_at", "updated_at", "covered", "replacement", "text_session_id", "client" }; int datasetId = 0; // read data, send to worker threads via queue for (SuggestionChangesDataset dataset : config.datasets) { writer.write(String.format("Evaluating dataset #%d: %s.%n", ++datasetId, dataset)); CSVFormat format = CSVFormat.DEFAULT; if (dataset.type.equals("dump")) { format = format.withEscape('\\').withNullString("\\N").withHeader(header); } else if (dataset.type.equals("artificial")) { format = format.withEscape('\\').withFirstRecordAsHeader(); } try (CSVParser parser = new CSVParser(new FileReader(dataset.path), format)) { for (CSVRecord record : parser) { String lang = record.get("language"); String rule = dataset.type.equals("dump") ? record.get("rule_id") : ""; String covered = record.get("covered"); String replacement = record.get("replacement"); String sentence = record.get("sentence"); String correction = record.isSet("correction") ? record.get("correction") : ""; String acceptLanguage = dataset.type.equals("dump") ? 
record.get("accept_language") : ""; if (sentence == null || sentence.trim().isEmpty()) { continue; } if (!config.language.equals(lang)) { continue; // TODO handle auto maybe? } if (dataset.type.equals("dump") && !config.rule.equals(rule)) { continue; } // correction column missing in export from doccano; workaround if (dataset.enforceCorrect && !record.isSet("correction")) { throw new IllegalStateException("enforceCorrect in dataset configuration enabled," + " but column 'correction' is not set for entry " + record); } if (dataset.type.equals("dump") && dataset.enforceAcceptLanguage) { if (acceptLanguage != null) { String[] entries = acceptLanguage.split(",", 2); if (entries.length == 2) { String userLanguage = entries[0]; // TODO: what to do with e.g. de-AT,de-DE;... if (!config.language.equals(userLanguage)) { continue; } } } } tasks.put(new SuggestionTestData(lang, sentence, covered, replacement, correction, dataset)); } } } for (Thread t : threads) { t.join(); } logger.join(10000L); logger.interrupt(); datasetWriter.close(); }
From source file:com.chinamobile.bcbsp.comm.MessageQueuesForDisk.java
/**
 * Serializes all messages in the queue into a single space-delimited string,
 * consuming (polling) the queue in the process.
 *
 * @param queue the message queue to drain; empty after this call returns
 * @return the concatenated messages, or an empty string if the queue was empty
 */
private String queueToString(ConcurrentLinkedQueue<IMessage> queue) {
    // BUGFIX: the original called queue.poll().intoString() unconditionally,
    // throwing a NullPointerException on an empty queue. Also switched from
    // repeated String concatenation (O(n^2)) to a StringBuilder (O(n)).
    StringBuilder buffer = new StringBuilder();
    IMessage msg;
    boolean first = true;
    while ((msg = queue.poll()) != null) {
        if (!first) {
            buffer.append(Constants.SPACE_SPLIT_FLAG);
        }
        buffer.append(msg.intoString());
        first = false;
    }
    return buffer.toString();
}
From source file:org.restcomm.app.qoslib.Services.Events.EventUploader.java
/** * Persists the queue of events to the phone's preferences *///from w w w .j av a 2 s. c o m protected void saveEvents(ConcurrentLinkedQueue<EventDataEnvelope> eventQueue) { //JSONArray jsonQueue= new JSONArray(); if (eventQueue == null) return; StringBuffer sb = new StringBuffer(); sb.append("["); Gson gson = new Gson(); // remove the oldest events until queue is below 400 while (eventQueue.size() > 300) eventQueue.poll(); for (EventDataEnvelope eventEnv : eventQueue) { try { String strJSON = gson.toJson(eventEnv); sb.append(strJSON); sb.append(","); //JSONObject evtJSON = new JSONObject(strJSON); //jsonQueue.put(evtJSON); } catch (Exception e) { LoggerUtil.logToFile(LoggerUtil.Level.ERROR, TAG, "persistQueue", "failed to persist event request", e); } } sb.deleteCharAt(sb.length() - 1); sb.append("]"); SharedPreferences preferenceSettings = MainService.getSecurePreferences(owner); String stringQueue = sb.toString();// jsonQueue.toString(); preferenceSettings.edit().putString(PreferenceKeys.Miscellaneous.EVENTS_QUEUE, stringQueue).commit(); }
From source file:metlos.executors.batch.BatchExecutorTest.java
private void runSimpleDelayTest(int nofThreads) throws Exception { final ConcurrentLinkedQueue<Long> executionTimes = new ConcurrentLinkedQueue<Long>(); Runnable task = new Runnable() { @Override//from ww w. ja v a 2 s . co m public void run() { executionTimes.add(System.currentTimeMillis()); } }; BatchExecutor ex = getExecutor(nofThreads); //start running my task... the task should "take" 0ms and there should be a delay //of 10ms between executions... the executionTimes collection should therefore //contain time stamps 10ms apart from each other. ex.submitWithPreferedDurationAndFixedDelay(Collections.singleton(task), 0, 0, 10, TimeUnit.MILLISECONDS); Thread.sleep(1000); ex.shutdown(); assert executionTimes.size() > 1 : "There should have been more than 1 task executed."; long minDelay = 8; //10ms +- 20% long maxDelay = 12; int nofElements = executionTimes.size(); long previousTime = executionTimes.poll(); long cummulativeDiff = 0; while (!executionTimes.isEmpty()) { long thisTime = executionTimes.poll(); long diff = thisTime - previousTime; cummulativeDiff += diff; previousTime = thisTime; } long averageDelay = cummulativeDiff / (nofElements - 1); assert minDelay < averageDelay && averageDelay < maxDelay : "The average delay should be in <" + minDelay + ", " + maxDelay + "> but was " + averageDelay + "."; }
From source file:edu.cornell.mannlib.vitro.webapp.rdfservice.impl.sparql.RDFServiceSparql.java
/**
 * Orders statements by a subject-chained traversal: starting from the first
 * URI-resource subject, emits all statements with that subject, then treats
 * each resource-valued object as the next subject to visit. Statements that
 * share a subject end up grouped together in the output.
 *
 * @param stmts the statements to order (returned unchanged if only one)
 * @return a reordered list containing exactly the input statements
 * @throws RuntimeException if no statement has a named subject, or if the
 *                          reordered list does not match the input size
 */
private List<Statement> sort(List<Statement> stmts) {
    List<Statement> output = new ArrayList<Statement>();
    int originalSize = stmts.size();
    if (originalSize == 1)
        return stmts;
    List<Statement> remaining = stmts;
    // NOTE(review): this queue is built and drained on a single thread; a
    // plain ArrayDeque would do -- ConcurrentLinkedQueue adds no value here.
    ConcurrentLinkedQueue<com.hp.hpl.jena.rdf.model.Resource> subjQueue = new ConcurrentLinkedQueue<com.hp.hpl.jena.rdf.model.Resource>();
    // Seed the traversal with the first URI-resource subject found.
    for (Statement stmt : remaining) {
        if (stmt.getSubject().isURIResource()) {
            subjQueue.add(stmt.getSubject());
            break;
        }
    }
    // Unlike the Jena variant of this method, a missing named subject is fatal
    // here rather than a warning.
    if (subjQueue.isEmpty()) {
        throw new RuntimeException("No named subject in statement patterns");
    }
    while (remaining.size() > 0) {
        // If the traversal stalled (statements disconnected from everything
        // visited so far), restart from the next remaining subject so every
        // statement is eventually emitted.
        if (subjQueue.isEmpty()) {
            subjQueue.add(remaining.get(0).getSubject());
        }
        while (!subjQueue.isEmpty()) {
            com.hp.hpl.jena.rdf.model.Resource subj = subjQueue.poll();
            List<Statement> temp = new ArrayList<Statement>();
            for (Statement stmt : remaining) {
                if (stmt.getSubject().equals(subj)) {
                    output.add(stmt);
                    // Resource-valued objects become subjects to visit next.
                    if (stmt.getObject().isResource()) {
                        subjQueue.add((com.hp.hpl.jena.rdf.model.Resource) stmt.getObject());
                    }
                } else {
                    temp.add(stmt);
                }
            }
            remaining = temp;
        }
    }
    // Sanity check: the reordering must neither drop nor duplicate statements.
    if (output.size() != originalSize) {
        throw new RuntimeException(
                "original list size was " + originalSize + " but sorted size is " + output.size());
    }
    return output;
}
From source file:com.huobi.demo.socketio.core.IOConnection.java
/**
 * Transport connected.
 *
 * {@link IOTransport} calls this when a connection is established. Cancels any
 * pending reconnect, then flushes messages buffered while disconnected --
 * either in one bulk send (when the transport supports it) or one frame at a
 * time via sendPlain().
 */
public synchronized void transportConnected() {
    setState(STATE_READY);
    // A successful connection supersedes any scheduled reconnect attempt.
    if (reconnectTask != null) {
        reconnectTask.cancel();
        reconnectTask = null;
    }
    resetTimeout();
    if (transport.canSendBulk()) {
        // Swap in a fresh buffer so new messages queue up while the old
        // contents are flushed in bulk.
        ConcurrentLinkedQueue<String> outputBuffer = this.outputBuffer;
        this.outputBuffer = new ConcurrentLinkedQueue<String>();
        try {
            // DEBUG
            String[] texts = outputBuffer.toArray(new String[outputBuffer.size()]);
            logger.debug("Bulk start:");
            for (String text : texts) {
                logger.debug("> " + text);
            }
            logger.debug("Bulk end");
            // DEBUG END
            transport.sendBulk(texts);
        } catch (IOException e) {
            // Restore the unsent messages so they are retried later.
            // NOTE(review): this restore discards anything enqueued into the
            // replacement buffer between the swap above and this catch. The
            // method is synchronized, so that is only a problem if producers
            // enqueue without holding this object's lock -- TODO confirm.
            this.outputBuffer = outputBuffer;
        }
    } else {
        // Transport cannot batch: drain and send one frame at a time.
        String text;
        while ((text = outputBuffer.poll()) != null)
            sendPlain(text);
    }
    this.keepAliveInQueue = false;
}
From source file:org.sunnycode.zkws.socketio.impl.IOConnection.java
/** * Transport connected.//ww w. j a va 2 s . com * * {@link IOTransport} calls this when a connection is established. */ public synchronized void transportConnected() { setState(STATE_READY); if (reconnectTask != null) { reconnectTask.cancel(); reconnectTask = null; } resetTimeout(); if (transport.canSendBulk()) { ConcurrentLinkedQueue<String> outputBuffer = this.outputBuffer; this.outputBuffer = new ConcurrentLinkedQueue<String>(); try { // DEBUG String[] texts = outputBuffer.toArray(new String[outputBuffer.size()]); logger.info("Bulk start:"); for (String text : texts) { logger.info("> " + text); } logger.info("Bulk end"); // DEBUG END transport.sendBulk(texts); } catch (IOException e) { this.outputBuffer = outputBuffer; } } else { String text; while ((text = outputBuffer.poll()) != null) sendPlain(text); } this.keepAliveInQueue = false; }