List of usage examples for org.apache.commons.collections BufferOverflowException printStackTrace
public void printStackTrace()
From source file:com.stainlesscode.mediapipeline.packetdecoder.MultispeedAudioPacketDecoder.java
/**
 * Decodes one audio packet and, when a complete set of samples is produced,
 * pushes it into the audio frame buffer — but only for every speed-th frame,
 * which is how this decoder implements multi-speed (fast-forward style)
 * playback. The packet is returned to the packet pool when decoding finishes.
 *
 * @param packet the encoded audio packet to decode; must not be null
 */
@SuppressWarnings("unchecked")
public void decodePacket(IPacket packet) {
    if (LogUtil.isDebugEnabled()) {
        LogUtil.debug("buffer size = " + engineRuntime.getAudioFrameBuffer().size());
        LogUtil.debug("decode audio packet " + packet.getTimeStamp());
    }
    try {
        if (nextFrame) {
            if (LogUtil.isDebugEnabled()) {
                LogUtil.debug("starting a new frame");
            }
            samples = (IAudioSamples) engineRuntime.getAudioSamplePool().borrowObject();
            nextFrame = false;
        }

        // Keep going until we've processed all data in the packet.
        int offset = 0;
        while (offset < packet.getSize()) {
            int bytesDecoded = engineRuntime.getAudioCoder().decodeAudio(samples, packet, offset);
            if (bytesDecoded < 0)
                throw new RuntimeException("got error decoding audio");
            offset += bytesDecoded;
            if (LogUtil.isDebugEnabled())
                LogUtil.debug("decoded " + offset + " total bytes from packet");
        }

        /*
         * Some decoders will consume data in a packet but will not be able to
         * construct a full set of samples yet, so always check whether the
         * decoder produced a complete set of samples.
         */
        if (samples.isComplete()) {
            int modulo = 0;
            // Was `new Double(...).intValue()` — a plain narrowing cast is
            // behaviorally identical and avoids the needless boxing.
            speed = (int) engineRuntime.getPlaySpeed();
            if (speed > 0) {
                modulo = (multispeedPacketCounter++) % speed;
                if (LogUtil.isDebugEnabled()) {
                    LogUtil.debug("speed factor is " + speed);
                    LogUtil.debug("modulo is " + modulo);
                }
            }
            // At play speed N only every Nth complete frame is buffered;
            // at speed <= 0 every frame is buffered (modulo stays 0).
            if (modulo == 0) {
                try {
                    engineRuntime.getAudioFrameBuffer().add(samples);
                    if (LogUtil.isDebugEnabled())
                        LogUtil.debug("---> Decoded an audio frame");
                    nextFrame = true;
                } catch (BufferOverflowException e) {
                    // Frame buffer is full; this frame is dropped.
                    // TODO(review): route through the project logger instead
                    // of stderr once an error-level LogUtil API is confirmed.
                    e.printStackTrace();
                }
            }
        }

        // The old `if (packet != null)` guard here was dead code: packet has
        // already been dereferenced unconditionally by packet.getSize() above,
        // so a null packet can never reach this point.
        engineRuntime.getPacketPool().returnObject(packet);
    } catch (Exception e) {
        // Collapses the previous byte-identical NoSuchElementException /
        // IllegalStateException / Exception handlers — behavior unchanged,
        // since both are subclasses of Exception.
        // TODO(review): prefer proper error logging over printStackTrace().
        e.printStackTrace();
    }
}
From source file:com.stainlesscode.mediapipeline.packetdecoder.DefaultAudioPacketDecoder.java
@SuppressWarnings("unchecked") public void decodePacket(IPacket packet) { if (LogUtil.isDebugEnabled()) { LogUtil.debug("buffer size = " + engineRuntime.getAudioFrameBuffer().size()); LogUtil.debug("decode audio packet " + packet.getTimeStamp()); }/* w w w .j a v a 2 s. co m*/ try { if (nextFrame) { if (LogUtil.isDebugEnabled()) { LogUtil.debug("starting a new frame"); } if (engineRuntime.getEngine().getEngineConfiguration() .getConfigurationValueAsBoolean(EngineConfiguration.USE_OBJECT_POOLS)) { samples = (IAudioSamples) engineRuntime.getAudioSamplePool().borrowObject(); } else { samples = IAudioSamples.make(1024, engineRuntime.getAudioCoder().getChannels()); } nextFrame = false; } int offset = 0; /* * Keep going until we've processed all data */ while (offset < packet.getSize()) { int bytesDecoded = engineRuntime.getAudioCoder().decodeAudio(samples, packet, offset); if (bytesDecoded < 0) throw new RuntimeException("got error decoding audio"); offset += bytesDecoded; if (LogUtil.isDebugEnabled()) LogUtil.debug("decoded " + offset + " total bytes from packet"); } returnBorrowed(packet); /* * Some decoder will consume data in a packet, but will not be able * to construct a full set of samples yet. 
Therefore you should * always check if you got a complete set of samples from the * decoder */ if (samples.isComplete()) { try { if (firstTimestamp) { LogUtil.info("First audio PTS is " + samples.getTimeStamp()); firstTimestamp = false; } // XXX audio drives sync with this code if (!engineRuntime.getSynchronizer().isStreamTimeZeroSet()) { engineRuntime.getSynchronizer().setStreamTimeZero(samples.getTimeStamp(), true); } engineRuntime.getAudioFrameBuffer().add(samples); if (LogUtil.isDebugEnabled()) { LogUtil.debug("$$STORE AUDIO FRAME " + samples.getTimeStamp()); } nextFrame = true; } catch (BufferOverflowException e) { e.printStackTrace(); } } } catch (NoSuchElementException e) { e.printStackTrace(); } catch (IllegalStateException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } }
From source file:com.stainlesscode.mediapipeline.packetdecoder.DefaultVideoPacketDecoder.java
public void decodePacket(IPacket packet) { if (LogUtil.isDebugEnabled()) LogUtil.debug("decode video packet " + packet.getTimeStamp()); try {/* ww w . j a v a2s .c om*/ if (picture == null) { if (engineRuntime.getEngine().getEngineConfiguration() .getConfigurationValueAsBoolean(EngineConfiguration.USE_OBJECT_POOLS)) { picture = (IVideoPicture) engineRuntime.getRawPicturePool().borrowObject(); } else { picture = IVideoPicture.make(engineRuntime.getVideoCoder().getPixelType(), engineRuntime.getVideoCoder().getWidth(), engineRuntime.getVideoCoder().getHeight()); } } int offset = 0; while (offset < packet.getSize()) { int bytesDecoded = engineRuntime.getVideoCoder().decodeVideo(picture, packet, offset); if (bytesDecoded < 0) throw new RuntimeException("got error decoding video " + bytesDecoded); offset += bytesDecoded; } returnBorrowed(packet); if (picture.isComplete()) { try { if (firstTimestamp) { LogUtil.info("First video PTS is " + picture.getTimeStamp()); firstTimestamp = false; } // XXX video drives sync with this code. // if (!((MultispeedVptsSynchronizer) engineRuntime // .getSynchronizer()).isStreamTimeZeroSet()) { // // ((MultispeedVptsSynchronizer) engineRuntime // .getSynchronizer()).setStreamTimeZero(picture // .getTimeStamp(), true); // } resampleAndCache(picture); if (engineRuntime.getEngine().getEngineConfiguration() .getConfigurationValueAsBoolean(EngineConfiguration.USE_OBJECT_POOLS)) { engineRuntime.getRawPicturePool().returnObject(picture); } picture = null; return; } catch (BufferOverflowException e) { return; } } } catch (NoSuchElementException e) { e.printStackTrace(); } catch (IllegalStateException e) { e.printStackTrace(); } catch (Exception e) { e.printStackTrace(); } }