Example usage for the org.apache.commons.collections.buffer.CircularFifoBuffer constructor

Introduction

On this page you can find example usage of the org.apache.commons.collections.buffer.CircularFifoBuffer constructor, collected from the open-source projects listed below.

Prototype

public CircularFifoBuffer(int size)

Document

Constructor that creates a buffer with the specified size; the size is fixed and sets the maximum number of elements the buffer holds before the oldest element is discarded.
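
A quick, self-contained sketch of the constructor's behaviour may help before the project examples below (the element values are invented for illustration; the API is Commons Collections 3.x). Once the buffer reaches its fixed size, every further add evicts the oldest element:

import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class CircularFifoBufferSketch {
    public static void main(String[] args) {
        CircularFifoBuffer buffer = new CircularFifoBuffer(3); // fixed capacity of 3 elements

        buffer.add("a");
        buffer.add("b");
        buffer.add("c");
        System.out.println(buffer);       // [a, b, c]  (the buffer is now full)

        buffer.add("d");                  // evicts the oldest element, "a"
        System.out.println(buffer);       // [b, c, d]
        System.out.println(buffer.get()); // b  (get() returns the least recently added element)
    }
}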

Usage

From source file:edu.polyu.screamalert.SoundProcessing.java

@SuppressWarnings("unchecked")
public static void initialize(Context context) {
    thisContext = context;
    x = new float[samplePerFrm];
    subx = new float[frameShift]; // Samples in a sub-frame
    mfcc = new MFCC(samplePerFrm, Config.RECORDER_SAMPLERATE, numMfcc);
    ccBuf = new CircularFifoBuffer(K); // Buffer storing K mfcc vectors
    dccBuf = new CircularFifoBuffer(K); // Buffer storing K delta-mfcc vectors
    pDet = new YinPitchDetector(Config.RECORDER_SAMPLERATE, samplePerFrm);
    piBuf = new CircularFifoBuffer(K);
    pkBuf = new CircularFifoBuffer(K);
    mu_z = new double[] { VoiceQuality.JITTER_MEAN, VoiceQuality.SHIMMER_MEAN }; // Jitter and shimmer mean
    sigma_z = new double[] { VoiceQuality.JITTER_STD, VoiceQuality.SHIMMER_STD }; // Jitter and shimmer stddev
    aList = new ArrayList<double[]>();
    enBuf = new CircularFifoBuffer(enBufSize); // Circular buffer storing energy profile of the latest K frames
    frmBuf = new CircularFifoBuffer(K); // Buffer storing K latest frames of audio signals

    for (int k = 0; k < K; k++) {
        ccBuf.add(new double[numMfcc + 1]); // Initialize MFCC FIFO buffers
        dccBuf.add(new double[numMfcc + 1]); // Initialize delta MFCC FIFO buffers
        piBuf.add(-1.0D); // Initialize pitch FIFO buffer for computing jitter and shimmer
        pkBuf.add(0.0D); // Initialize peak amplitude FIFO buffer for computing shimmer
        frmBuf.add(new double[samplePerFrm]); // Initialize frame buffer
    }
    for (int k = 0; k < enBufSize; k++) {
        enBuf.add(new double[1]); // Initialize energy buffer
    }
    mainIntent = new Intent(thisContext, SoundProcessingSetting.class);
    mainIntent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_SINGLE_TOP);
    pIntent = PendingIntent.getActivity(thisContext, 0, mainIntent, 0); // Go back to the main activity when the user presses the notification
    noti = new NotificationCompat.Builder(thisContext)
            .setContentTitle(
                    thisContext.getString(R.string.app_name) + " " + thisContext.getString(R.string.running))
            .setContentText(thisContext.getString(R.string.configure)).setSmallIcon(R.drawable.ic_launcher)
            .setContentIntent(pIntent).build();
    noti.flags = Notification.FLAG_FOREGROUND_SERVICE;
    if (SoundProcessingActivity.thisActivity == null)
        calibrateDialog = new ProgressDialog(Exchanger.thisContext);
    else
        calibrateDialog = new ProgressDialog(SoundProcessingActivity.thisActivity);
}
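
The initialisation loops above pre-fill each buffer with K (or enBufSize) placeholder entries, so every window is full from the first audio frame and later adds simply rotate the oldest entry out. Below is a stripped-down sketch of that pattern; the constants and the frame length are invented, not taken from the project:

import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class PreFilledWindowSketch {
    static final int K = 4;                   // window length in frames (illustrative)
    static final int SAMPLES_PER_FRAME = 256; // frame size (illustrative)

    public static void main(String[] args) {
        CircularFifoBuffer frmBuf = new CircularFifoBuffer(K);
        for (int k = 0; k < K; k++) {
            frmBuf.add(new double[SAMPLES_PER_FRAME]); // placeholder frames keep the window full from the start
        }
        frmBuf.add(new double[SAMPLES_PER_FRAME]);     // a real frame evicts the oldest placeholder automatically
        System.out.println(frmBuf.size());             // still K
    }
}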

From source file:com.neophob.sematrix.jmx.PixelControllerStatus.java

/**
 * Track output time.
 *
 * @param output the output
 * @param timeMeasureItem the time measure item to track
 * @param time the time
 */
public void trackOutputTime(Output output, TimeMeasureItemOutput timeMeasureItem, long time) {
    if (this.ignoreValue()) {
        return;
    }
    // make sure the output instance is already registered in timeMeasureMapOutput
    if (!this.timeMeasureMapOutput.containsKey(output)) {
        this.timeMeasureMapOutput.put(output, new HashMap<TimeMeasureItemOutput, CircularFifoBuffer>());
        this.outputList.add(output);
    }
    // make sure a CircularFifoBuffer instance has been constructed for the given timeMeasureItem
    if (!this.timeMeasureMapOutput.get(output).containsKey(timeMeasureItem)) {
        this.timeMeasureMapOutput.get(output).put(timeMeasureItem,
                new CircularFifoBuffer(this.configuredFps * SECONDS));
    }
    // add time to internal buffer instance
    this.timeMeasureMapOutput.get(output).get(timeMeasureItem).add(time);
}

From source file:com.neophob.sematrix.core.jmx.PixelControllerStatus.java

/**
 * Track output time.
 *
 * @param output the output
 * @param timeMeasureItem the time measure item to track
 * @param time the time
 */
public void trackOutputTime(IOutput output, TimeMeasureItemOutput timeMeasureItem, long time) {
    if (this.ignoreValue()) {
        return;
    }
    // make sure the output instance is already registered in timeMeasureMapOutput
    if (!this.timeMeasureMapOutput.containsKey(output)) {
        this.timeMeasureMapOutput.put(output, new HashMap<TimeMeasureItemOutput, CircularFifoBuffer>());
        this.outputList.add(output);
    }
    // make sure a CircularFifoBuffer instance has been constructed for the given timeMeasureItem
    if (!this.timeMeasureMapOutput.get(output).containsKey(timeMeasureItem)) {
        this.timeMeasureMapOutput.get(output).put(timeMeasureItem,
                new CircularFifoBuffer(this.configuredFps * SECONDS));
    }
    // add time to internal buffer instance
    this.timeMeasureMapOutput.get(output).get(timeMeasureItem).add(time);
}
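
Both variants size the buffer as configuredFps * SECONDS, so it always holds roughly the last few seconds of timing measurements, with the oldest sample dropped on every new add. The averaging below is only an illustrative sketch of how such a rolling buffer is typically consumed; it is not code from PixelControllerStatus:

import java.util.Iterator;

import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class RollingTimeBufferSketch {
    public static void main(String[] args) {
        int configuredFps = 25;
        int seconds = 10;
        CircularFifoBuffer times = new CircularFifoBuffer(configuredFps * seconds);

        for (long t = 0; t < 1000; t++) {
            times.add(Long.valueOf(t)); // once full, the oldest measurement is discarded
        }

        long sum = 0L;
        for (Iterator it = times.iterator(); it.hasNext();) { // iterates from oldest to newest
            sum += ((Long) it.next()).longValue();
        }
        System.out.println("rolling average: " + (double) sum / times.size());
    }
}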

From source file:de.suse.swamp.modules.actions.WorkflowActions.java

/**
 * Is called when a task ok comes in.
 */
public void doTaskok(RunData data, Context context) throws Exception {
    Logger.LOG("doTaskok() from webapp.");
    SWAMPUser user = ((SWAMPTurbineUser) data.getUser()).getSWAMPUser();
    WorkflowAPI wfapi = new WorkflowAPI();
    TaskAPI taskapi = new TaskAPI();
    ResultList history = new ResultList();

    if (data.getParameters().containsKey("taskid")) {
        // get the task we're working on
        int taskId = data.getParameters().getInt("taskid");
        WorkflowTask task = taskapi.doGetTask(taskId, user.getUserName());
        String taskType = null;
        ArrayList validationErrors = new ArrayList();

        // Check for availability of that Task:
        if (task != null && task.getState() == WorkflowTask.ACTIVE) {
            // get the action type of the task
            taskType = task.getActionType();

            // fill in the result for the different task types
            if (taskType.equals("manualtask")) {
                ManualtaskResult result = (ManualtaskResult) task.getResult();
                result.setDone(true);

            } else if (taskType.equals("decision")) {
                int answer = -1;
                // get the answer given
                if (data.getParameters().containsKey("answer")) {
                    answer = data.getParameters().getInt("answer");
                    Logger.DEBUG("Answer #" + answer);
                    // if no answer selected, log error
                } else {
                    Logger.ERROR("in doTaskok: no answer on question given.");
                }
                // put selection into result
                DecisionResult result = (DecisionResult) task.getResult();
                result.setSelection(answer);
            } else if (taskType.equals("dataedit")) {
                DataeditResult result = (DataeditResult) task.getResult();
                context.put("result", result);

                DataeditActionTemplate action = (DataeditActionTemplate) task.getActionTemplate();
                HashMap actionFields = action.getAllFields(task.getWorkflowId());
                Workflow wf = wfapi.getWorkflow(task.getWorkflowId(), user.getUserName());

                // put all values in the result object
                for (Iterator iter = actionFields.keySet().iterator(); iter.hasNext();) {
                    ArrayList setField = (ArrayList) actionFields.get(iter.next());
                    for (Iterator it = setField.iterator(); it.hasNext();) {
                        Field f = (Field) it.next();
                        String fieldpath = f.getPath();
                        String field = "field_" + fieldpath;
                        if (data.getParameters().containsKey(field)) {
                            // binary data need extra storage
                            if (f.getDatatype().equals("fileref")) {
                                FileItem value = data.getParameters().getFileItem(field);
                                Logger.DEBUG("Value for key (file)" + field + ": " + value);
                                // need to store the file now
                                Databit dbit = wf.getDatabit(fieldpath);
                                if (DatapackActions.storeFile(dbit, true, value, user.getUserName())) {
                                    String fileName = value.getName();
                                    // fix for browsers setting complete path as name: 
                                    if (fileName.indexOf("\\") >= 0)
                                        fileName = fileName.substring(fileName.lastIndexOf("\\") + 1);
                                    if (fileName.indexOf("/") >= 0)
                                        fileName = fileName.substring(fileName.lastIndexOf("/") + 1);
                                    result.setValue(fieldpath, fileName);
                                }
                            } else if (f.getDatatype().equalsIgnoreCase("multienum")) {
                                SWAMPHashSet values = new SWAMPHashSet(data.getParameters().getStrings(field));
                                result.setValue(fieldpath, values.toString(", "));
                            } else if (f.getDatatype().equalsIgnoreCase("patchdocumd")) {
                                String value = data.getParameters().get(field);
                                Logger.DEBUG("Value for key " + field + ": " + value);
                                result.setValue(fieldpath, value);
                            } else {
                                String value = StringEscapeUtils.unescapeHtml(data.getParameters().get(field));
                                Logger.DEBUG("Value for key " + field + ": " + value);
                                result.setValue(fieldpath, value);
                            }
                        } else if (data.getParameters().containsKey("boolean_" + fieldpath)) {
                            result.setValue(fieldpath, "false");
                        } else if (!f.isMandatory()) {
                            // don't complain about missing, non-mandatory fields
                        } else {
                            Logger.ERROR("Mandatory field " + fieldpath + " not set.");
                        }
                    }
                }
            }
            // validate task result
            validationErrors = task.validate();

            // if everything is ok, try to finish the task
            if (validationErrors.size() == 0) {
                try {
                    taskapi.finishTask(task, user.getUserName(), history);
                } catch (Exception e) {
                    e.printStackTrace();
                    validationErrors.add(e.getMessage());
                }
            }

            if (validationErrors.size() == 0) {
                Logger.LOG("Webapp: Done with working on task with id " + task.getId());

                WorkflowTask wftask = task;
                Workflow wf = wfapi.getWorkflow(wftask.getWorkflowId(), user.getUserName());

                context.put("statusheader", "Success");
                context.put("statusmessage", "Task \"" + task.getReplacedDescription() + "\" done in workflow "
                        + wf.getName() + ".");

                context.put("statusclass", "success");
                context.put("icon", "ok");
                context.put("history", history);
                context.put("workflow", wf);

                // add general Workflow Help
                SWAMPScreen.addHelplink(wf.getTemplate(), context, user.getUserName());
                ArrayList helps = new ArrayList();
                if (context.get("helps") != null) {
                    helps = (ArrayList) context.get("helps");
                }

                // add helplinks if there are new Tasks:
                if (wf.getActiveTasks().size() > 0) {
                    List activeTasks = wf.getActiveTasks();
                    for (Iterator it = activeTasks.iterator(); it.hasNext();) {
                        WorkflowTask helptask = (WorkflowTask) it.next();
                        String helpConext = helptask.getActionTemplate().getHelpContext();
                        if (helpConext != null && !helpConext.equals("")) {
                            ContextHelp help = new DocumentationAPI().getContextHelp(helpConext,
                                    user.getUserName());
                            if (help != null && !helps.contains(help)) {
                                helps.add(help);
                            }
                        }
                    }
                    context.put("helps", helps);
                }

                if (user.getPerm("taskpage", "results").equals("workflow")) {
                    Logger.DEBUG("Doing redirect to workflow page after task for " + user.getUserName());
                    setTemplate(data, "DisplayWorkflow.vm");
                } else if (user.getPerm("taskpage", "results").equals("previous")) {
                    CircularFifoBuffer pageBuffer = (CircularFifoBuffer) data.getUser().getTemp("pageBuffer",
                            new CircularFifoBuffer(2));
                    SWAMPHashMap params = (SWAMPHashMap) pageBuffer.get();
                    if (params != null && params.containsKey("template")) {
                        Logger.DEBUG("Redirect to previous page (" + params.get("template") + ") for "
                                + user.getUserName());
                        data.getParameters().clear();
                        for (Iterator it = params.keySet().iterator(); it.hasNext();) {
                            String key = (String) it.next();
                            data.getParameters().add(key, (String) params.get(key));
                        }
                        setTemplate(data, (String) params.get("template"));
                    } else {
                        Logger.WARN("Desired redirect not possible, no pageBuffer");
                    }
                }

                // if there were errors during validation, log the error
            } else {
                // go back to the Task-Page
                context.put("taskerror", "true");
                setTemplate(data, "DisplayTask.vm");

                Iterator errIter = validationErrors.iterator();
                String message = "", error;
                while (errIter.hasNext()) {
                    error = (String) errIter.next();
                    message = message + "<br />" + error;
                    Logger.ERROR(error);
                }
                message = message + "<p />Please correct the above mistake!";
                context.put("statusclass", "error");
                context.put("statusheader", "Error validating task");
                context.put("statusmessage", message);
                context.put("icon", "error");

                // fix page buffer
                CircularFifoBuffer pageBuffer = (CircularFifoBuffer) data.getUser().getTemp("pageBuffer",
                        new CircularFifoBuffer(2));
                pageBuffer.add(pageBuffer.get());
            } // end validation

        } else {
            // illegal task requested, redirect
            setTemplate(data, "DisplayTask.vm");
        }

    } else {
        Logger.ERROR("in doTaskok: no task id.");
    } //end taskid
}
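
The pageBuffer handling above relies on get() returning the least recently added element: with a capacity of 2 the buffer holds the previous and the current page, so get() yields the previous page, and the "fix page buffer" step re-adds that oldest entry so it becomes the newest one, presumably so the previous page is not lost when the redisplayed task page is recorded again. A minimal sketch of the get() behaviour, with invented template names:

import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class PageBufferSketch {
    public static void main(String[] args) {
        CircularFifoBuffer pageBuffer = new CircularFifoBuffer(2);
        pageBuffer.add("SomeOverview.vm");    // page visited before the task page
        pageBuffer.add("DisplayTask.vm");     // current page
        System.out.println(pageBuffer.get()); // SomeOverview.vm: get() returns the oldest entry

        pageBuffer.add(pageBuffer.get());     // evicts the oldest entry and appends it again
        System.out.println(pageBuffer);       // [DisplayTask.vm, SomeOverview.vm]
    }
}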

From source file:edu.berkeley.compbio.sequtils.strings.MarkovTreeNode.java

/**
 * Computes the total log probability of generating the given sequence fragment under the model.  This differs from
 * {@link #totalProbability(byte[])} in that the sequence fragment is not given explicitly but only as metadata.  Thus
 * its probability may be computed from summary statistics that are already available in the given SequenceFragment
 * rather than from the raw sequence.  Also, because these probabilities are typically very small, the result is
 * returned in log space (indeed implementations will likely compute them in log space).
 *
 * @param sequenceFragment the SequenceFragment whose probability is to be computed
 * @return the natural logarithm of the conditional probability (a non-positive value, since the probability itself lies between 0 and 1)
 */
public double fragmentLogProbability(final SequenceFragment sequenceFragment, final boolean perSample)
        throws SequenceSpectrumException {
    // the RonPSA implementation uses backlinks and so is vastly more efficient.
    // We can't use backlinks here because they might point to nodes outside of this subtree

    synchronized (sequenceFragment.getReaderForSynchronizing()) // because of resetting the reader
    {
        final SequenceReader in;
        try {
            in = sequenceFragment.getResetReader();
        } catch (NotEnoughSequenceException e) {
            throw new SequenceSpectrumRuntimeException(e);
        }
        final int requiredPrefixLength = getMaxDepth() - 1;
        double logprob = 0;
        final CircularFifoBuffer prefix = new CircularFifoBuffer(requiredPrefixLength);

        int samples = 0;
        while (true) {
            try {
                final byte c = in.read();

                try {
                    // PERF converting array prefix from circularFifoBuffer to byte[] is terribly inefficient
                    final byte[] prefixAsBytes = DSArrayUtils
                            .toPrimitive((Byte[]) prefix.toArray(new Byte[prefix.size()]));

                    // these log probabilities could be cached, e.g. logConditionalProbability(c, prefix)
                    logprob += MathUtils.approximateLog(conditionalProbability(c, prefixAsBytes));

                    samples++;

                    prefix.add(c);
                } catch (SequenceSpectrumException e) {
                    // probably just an invalid character
                    logger.debug("Invalid character " + (char) c);
                    // ignore this character as far as the probability is concerned
                    prefix.clear();
                }
            } catch (NotEnoughSequenceException e) {
                break;
            } catch (IOException e) {
                logger.error("Error", e);
                throw new SequenceSpectrumException(e);
            } catch (FilterException e) {
                logger.error("Error", e);
                throw new SequenceSpectrumException(e);
            }
        }

        if (perSample) {
            // we have ln(product(p) == sum(ln(p)).
            // The geometric mean is exp(sum(ln(p))/n), so to get ln(geometric mean) we need only divide by n.
            logprob /= samples;
        }

        return logprob;
    }
}
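
The prefix buffer above is a sliding window over the last requiredPrefixLength symbols read from the stream: each new symbol pushes the oldest one out, and clear() simply restarts the window after an invalid character. A small self-contained sketch of the same sliding-prefix idea, with an invented input sequence:

import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class SlidingPrefixSketch {
    public static void main(String[] args) {
        int requiredPrefixLength = 3;
        CircularFifoBuffer prefix = new CircularFifoBuffer(requiredPrefixLength);
        for (byte c : "ACGTACGT".getBytes()) {
            System.out.println("prefix before '" + (char) c + "': " + prefix);
            prefix.add(Byte.valueOf(c)); // once full, the oldest symbol is silently dropped
        }
    }
}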

From source file:org.apache.apex.malhar.lib.utils.serde.DefaultBlockReleaseStrategy.java

public DefaultBlockReleaseStrategy(int period) {
    freeBlockNumQueue = new CircularFifoBuffer(period);
}

From source file:org.apache.flink.streaming.state.SlidingWindowState.java

public SlidingWindowState(long windowSize, long slideInterval, long timeUnitInMillis) {
    this.currentRecordCount = 0;
    // here we assume that windowSize and slideInterval are divisible by
    // timeUnitInMillis.
    this.fullRecordCount = (int) (windowSize / timeUnitInMillis);
    this.slideRecordCount = (int) (slideInterval / timeUnitInMillis);
    this.buffer = new CircularFifoBuffer(fullRecordCount);
    this.iterator = new SlidingWindowStateIterator<T>(buffer);
}
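
The buffer capacity here is the number of time units in one full window, so the buffer always contains exactly one window's worth of per-unit records. A quick illustration of the sizing arithmetic, with invented durations chosen to satisfy the divisibility assumption mentioned in the comment:

import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class WindowSizingSketch {
    public static void main(String[] args) {
        long windowSize = 10000L;      // 10 s window
        long slideInterval = 2000L;    // slide every 2 s
        long timeUnitInMillis = 1000L; // one record per second

        int fullRecordCount = (int) (windowSize / timeUnitInMillis);     // 10 records per window
        int slideRecordCount = (int) (slideInterval / timeUnitInMillis); // advance 2 records per slide

        CircularFifoBuffer buffer = new CircularFifoBuffer(fullRecordCount);
        System.out.println(fullRecordCount + " records per window, sliding by " + slideRecordCount);
        System.out.println("buffer capacity: " + buffer.maxSize());      // 10
    }
}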

From source file:org.apache.tajo.engine.function.window.Lag.java

@Override
public void eval(FunctionContext ctx, Tuple params) {
    LagContext lagCtx = (LagContext) ctx;
    if (lagCtx.lagBuffer == null) {
        int lagNum = 0;
        if (params.size() == 1) {
            lagNum = 1;
        } else {
            lagNum = params.getInt4(1);
        }
        lagCtx.lagBuffer = new CircularFifoBuffer(lagNum + 1);
    }

    if (!params.isBlankOrNull(0)) {
        lagCtx.lagBuffer.add(params.asDatum(0));
    } else {
        lagCtx.lagBuffer.add(NullDatum.get());
    }

    if (lagCtx.defaultDatum == null) {
        if (params.size() == 3) {
            lagCtx.defaultDatum = params.asDatum(2);
        } else {
            lagCtx.defaultDatum = NullDatum.get();
        }
    }
}
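
Sizing the buffer as lagNum + 1 means that once it is full, the oldest element returned by get() is exactly the value lagNum rows before the current one. The sketch below illustrates that indexing with plain integers rather than Tajo datums; it is not the function's actual output step:

import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class LagBufferSketch {
    public static void main(String[] args) {
        int lagNum = 2;
        CircularFifoBuffer lagBuffer = new CircularFifoBuffer(lagNum + 1);
        for (int row = 1; row <= 5; row++) {
            lagBuffer.add(Integer.valueOf(row));
            if (lagBuffer.size() > lagNum) {
                // the oldest buffered value is the one lagNum rows back
                System.out.println("row " + row + " -> lag(" + lagNum + ") = " + lagBuffer.get());
            }
        }
        // prints: row 3 -> lag(2) = 1, row 4 -> lag(2) = 2, row 5 -> lag(2) = 3
    }
}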

From source file:org.graylog2.log4j.MemoryAppender.java

protected MemoryAppender(String name, Filter filter, Layout<? extends Serializable> layout,
        boolean ignoreExceptions, int bufferSize) {
    super(name, filter, layout, ignoreExceptions);
    this.bufferSize = bufferSize;
    this.buffer = BufferUtils.synchronizedBuffer(new CircularFifoBuffer(bufferSize));
}
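
CircularFifoBuffer itself is not thread safe, which is why the appender wraps it with BufferUtils.synchronizedBuffer. The sketch below shows the same wrapping with invented log lines; note that iterating over the decorated buffer still requires manual synchronization, as the Commons Collections synchronized decorators document:

import org.apache.commons.collections.Buffer;
import org.apache.commons.collections.BufferUtils;
import org.apache.commons.collections.buffer.CircularFifoBuffer;

public class SynchronizedRingLogSketch {
    public static void main(String[] args) {
        Buffer ringLog = BufferUtils.synchronizedBuffer(new CircularFifoBuffer(500));
        ringLog.add("first log line");  // individual calls are synchronized by the decorator
        ringLog.add("second log line");
        synchronized (ringLog) {        // iteration must be synchronized by the caller
            for (Object line : ringLog) {
                System.out.println(line);
            }
        }
    }
}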

From source file:org.jhk.pulsing.web.consumer.Consumer.java

public Consumer(String groupId, int window, String topic) {
    Properties properties = new Properties(_DEFAULT_PROPERTIES);
    properties.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);

    consumer = new KafkaConsumer<>(properties);
    buffer = new CircularFifoBuffer(window);
    this.topic = topic;
}