Example source code for the Java class android.media.MediaCodec.BufferInfo

Project: DroneControl    File: BebopActivity.java
private void configureMediaCodec() {
    mVideoFormat = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
    mVideoFormat.setByteBuffer("csd-0", mCSDBuffer);

    try {
        if (mSurface != null) {
            mVideoDecoder.configure(mVideoFormat, mSurface, null, 0);
            mVideoDecoder.start();

            mVideoInputBuffers = mVideoDecoder.getInputBuffers();
            mVideoBufferInfo = new BufferInfo();

            mIsVideoConfigured = true;
        }
    } catch (Exception e) {
        Logger.e(TAG, "Could not configure mediacodec");
    }
}
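For reference, a decoder configured this way is typically drained with a loop like the sketch below; the method name, the 10 ms timeout, and rendering straight to the Surface are assumptions, not code from DroneControl.

// Sketch only: drain decoded frames to the Surface using a BufferInfo.
// Assumes a decoder configured with an output Surface, as in configureMediaCodec().
private void drainDecoderToSurface(MediaCodec decoder, MediaCodec.BufferInfo info) {
    int outIndex = decoder.dequeueOutputBuffer(info, 10000); // 10 ms timeout (assumed)
    while (outIndex >= 0) {
        // Rendering to the Surface, so release with "true".
        decoder.releaseOutputBuffer(outIndex, true);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            break;
        }
        outIndex = decoder.dequeueOutputBuffer(info, 10000);
    }
}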
Project: AndroidInstantVideo    File: MediaCodecInputStream.java
@NonNull
private static BufferInfo copyBufferInfo(BufferInfo lastBufferInfo) {
    BufferInfo bufferInfo = new BufferInfo();
    bufferInfo.presentationTimeUs = lastBufferInfo.presentationTimeUs;
    bufferInfo.flags = lastBufferInfo.flags;
    bufferInfo.offset = lastBufferInfo.offset;
    bufferInfo.size = lastBufferInfo.size;
    return bufferInfo;
}
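The same copy can also be expressed with BufferInfo.set(), which assigns offset, size, presentation time and flags in a single call; this variant is a sketch, not part of AndroidInstantVideo.

@NonNull
private static BufferInfo copyBufferInfoWithSet(BufferInfo lastBufferInfo) {
    BufferInfo bufferInfo = new BufferInfo();
    // BufferInfo.set(offset, size, presentationTimeUs, flags) fills all four fields at once.
    bufferInfo.set(lastBufferInfo.offset, lastBufferInfo.size,
            lastBufferInfo.presentationTimeUs, lastBufferInfo.flags);
    return bufferInfo;
}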
Project: ROLF-EV3    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
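flushMediaCodec() only empties output buffers that are already pending. A related pattern, draining until the codec sets BUFFER_FLAG_END_OF_STREAM in the BufferInfo, might look like the sketch below; the method name and timeout are assumptions.

// Sketch: drain a codec until it signals BUFFER_FLAG_END_OF_STREAM.
// Queue an end-of-stream input buffer (or call signalEndOfInputStream() for
// Surface input) first, otherwise this loop never terminates.
private void drainUntilEos(MediaCodec codec) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        int index = codec.dequeueOutputBuffer(info, 10000);
        if (index >= 0) {
            codec.releaseOutputBuffer(index, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        }
    }
}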
Project: SCR-Screen-Recorder-app    File: MediaMuxerHack.java
/**
 * Writes an encoded sample into the muxer.
 * <p>The application needs to make sure that the samples are written into
 * the right tracks. Also, it needs to make sure the samples for each track
 * are written in chronological order (e.g. in the order they are provided
 * by the encoder.)</p>
 * @param byteBuf The encoded sample.
 * @param trackIndex The track index for this sample.
 * @param bufferInfo The buffer information related to this sample.
 * MediaMuxer uses the flags provided in {@link MediaCodec.BufferInfo},
 * to signal sync frames.
 */
public void writeSampleData(int trackIndex, ByteBuffer byteBuf,
                            MediaCodec.BufferInfo bufferInfo) {
    if (trackIndex < 0 || trackIndex > mLastTrackIndex) {
        throw new IllegalArgumentException("trackIndex is invalid");
    }

    if (byteBuf == null) {
        throw new IllegalArgumentException("byteBuffer must not be null");
    }

    if (bufferInfo == null) {
        throw new IllegalArgumentException("bufferInfo must not be null");
    }
    if (bufferInfo.size < 0 || bufferInfo.offset < 0
            || (bufferInfo.offset + bufferInfo.size) > byteBuf.capacity()
            || bufferInfo.presentationTimeUs < 0) {
        throw new IllegalArgumentException("bufferInfo must specify a" +
                " valid buffer offset, size and presentation time");
    }

    if (mNativeObject == 0) {
        throw new IllegalStateException("Muxer has been released!");
    }

    if (mState != MUXER_STATE_STARTED) {
        throw new IllegalStateException("Can't write, muxer is not started");
    }

    nativeWriteSampleData(mNativeObject, trackIndex, byteBuf,
            bufferInfo.offset, bufferInfo.size,
            bufferInfo.presentationTimeUs, bufferInfo.flags);
}
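A typical caller fills the BufferInfo from MediaCodec.dequeueOutputBuffer() and positions the ByteBuffer according to its offset and size before calling writeSampleData(). The sketch below assumes a started muxer and a valid track index; it is not part of MediaMuxerHack.

// Sketch: copy one encoded sample from an encoder into the muxer.
private void writeOneSample(MediaCodec encoder, MediaMuxerHack muxer, int trackIndex) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = encoder.dequeueOutputBuffer(info, 10000);
    if (index >= 0) {
        ByteBuffer encoded = encoder.getOutputBuffers()[index];
        // Restrict the buffer to the region described by the BufferInfo.
        encoded.position(info.offset);
        encoded.limit(info.offset + info.size);
        muxer.writeSampleData(trackIndex, encoded, info);
        encoder.releaseOutputBuffer(index, false);
    }
}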
Project: Android_CCTV    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: Endoscope    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: RemoteEye    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: libstreaming_android_studio    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: JJCamera    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: OoDroid2    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: cInterphone    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: Flyver-Apps    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: SmartRover    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: cameraMediaCodec    File: AvcDecoder.java
public int Init()
{
    Log.i("AvcDecoder", "Init");
    mMC = MediaCodec.createDecoderByType(MIME_TYPE);
    mStatus = STATUS_LOADED;
    mBI = new BufferInfo();
    Log.i("AvcDecoder", "Init, createDecoderByType");
    return 0;
}
Project: AndroidVideoSamples    File: MediaCodecDecodeController.java
private void setupExtractor() {
   mExtractor = new MediaExtractor();
   try {
      mExtractor.setDataSource( mUri.toString() );
   } catch ( IOException e ) {
      e.printStackTrace();
   }

   int videoIndex = 0;

   for ( int trackIndex = 0; trackIndex < mExtractor.getTrackCount(); trackIndex++ ) {
      MediaFormat format = mExtractor.getTrackFormat( trackIndex );

      String mime = format.getString( MediaFormat.KEY_MIME );
      if ( mime != null ) {
         if ( mime.equals( "video/avc" ) ) {
            mExtractor.selectTrack( trackIndex );
            videoIndex = trackIndex;
            break;
         }
      }
   }

   mDecoder = MediaCodec.createDecoderByType( "video/avc" );
   mDecoder.configure( mExtractor.getTrackFormat( videoIndex ), mSurface, null, 0 );
   mDecoder.start();

   mInfo = new BufferInfo();

   mInputBuffers = mDecoder.getInputBuffers();
   mOutputBuffers = mDecoder.getOutputBuffers();
}
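The fields initialized here (mInfo, mInputBuffers, mOutputBuffers) are normally used in a decode step such as the sketch below; the method name and the 10 ms timeouts are assumptions rather than AndroidVideoSamples code.

// Sketch: feed one extractor sample into the decoder and render one output frame.
private void decodeOneFrame() {
    int inIndex = mDecoder.dequeueInputBuffer(10000);
    if (inIndex >= 0) {
        ByteBuffer inputBuffer = mInputBuffers[inIndex];
        inputBuffer.clear();
        int sampleSize = mExtractor.readSampleData(inputBuffer, 0);
        if (sampleSize >= 0) {
            mDecoder.queueInputBuffer(inIndex, 0, sampleSize, mExtractor.getSampleTime(), 0);
            mExtractor.advance();
        } else {
            // No more samples: signal end of stream to the decoder.
            mDecoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
    }
    int outIndex = mDecoder.dequeueOutputBuffer(mInfo, 10000);
    if (outIndex >= 0) {
        // Rendering to mSurface, so release with "true".
        mDecoder.releaseOutputBuffer(outIndex, true);
    }
}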
Project: AndroidVideoSamples    File: MediaCodecDecodeController.java
private void seekTo( long ms, int seekMode ) {
   // Log.d( TAG, String.format( Locale.US, "seeking to %d", ms ) );

   mExtractor.seekTo( ms * 1000, seekMode );
   mCurrentPosition = (int) mExtractor.getSampleTime() / 1000;
   mTimer.setTime( mCurrentPosition );
   // Log.d( TAG, String.format( Locale.US, "seeking extractor to %d, sample time is now %d", ms, mExtractor.getSampleTime() ) );
   mDecoder.flush();
   mInputBuffers = mDecoder.getInputBuffers();
   mOutputBuffers = mDecoder.getOutputBuffers();

   mInfo = new BufferInfo();
}
Project: LiveMultimedia    File: GPUEncoder.java
/*******************************************************************
* createVideoCodec() creates the video codec which is H264 based
******************************************************************/
public synchronized void createVideoCodec() {
    try {
        Log.w(TAG, "----->createVideoCodec()<-----");
        mCodec.configure(mFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mCodec.start();
        mBufferInfo = new BufferInfo();
    } catch (IllegalStateException e) {
        Log.e(TAG, "Error in creating video codec failed configuration.");
    }
    Log.w(TAG, "----->end createVideoCodec()<-----");
 }
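mFormat is built elsewhere in GPUEncoder; a plausible H.264 format for the CONFIGURE_FLAG_ENCODE call above could look like the sketch below, with all numeric values and the color format being placeholders rather than the project's settings.

// Sketch only: a plausible H.264 MediaFormat for the configure() call above.
// Resolution, bitrate, frame rate and color format are illustrative placeholders.
private MediaFormat buildVideoFormat() {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    // COLOR_FormatSurface for Surface input; use a YUV format for byte-buffer input.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    return format;
}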
Project: LiveMultimedia    File: GPUEncoder.java
private synchronized void dequeueOutputBuffer(
        MediaCodec codec, ByteBuffer[] outputBuffers,
        int index, MediaCodec.BufferInfo info) {
    if (mAudioFeatureActive) {
        codec.releaseOutputBuffer(index, false);
    }
}
Project: spydroid-ipcamera    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: libstreaming    File: EncoderDebugger.java
private void flushMediaCodec(MediaCodec mc) {
    int index = 0;
    BufferInfo info = new BufferInfo();
    while (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
        index = mc.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (index>=0) {
            mc.releaseOutputBuffer(index, false);
        }
    }
}
Project: AndroidInstantVideo    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: libcommon    File: MediaMuxerWrapper.java
@Override
public void writeSampleData(final int trackIndex, @NonNull final ByteBuffer byteBuf, @NonNull final BufferInfo bufferInfo) {
    if (!mReleased) {
        mMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
    }
}
Project: ROLF-EV3    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: ROLF-EV3    File: EncoderDebugger.java
private long encode() {
    int n = 0;
    long elapsed = 0, now = timestamp();
    int encOutputIndex = 0, encInputIndex = 0;
    BufferInfo info = new BufferInfo();
    ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

    while (elapsed<5000000) {
        // Feeds the encoder with an image
        encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
        if (encInputIndex>=0) {
            check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
            encInputBuffers[encInputIndex].clear();
            encInputBuffers[encInputIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE) Log.d(TAG,"No buffer available !");
        }

        // Tries to get a NAL unit
        encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encOutputIndex>=0) {
            mVideo[n] = new byte[info.size];
            encOutputBuffers[encOutputIndex].clear();
            encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
            mEncoder.releaseOutputBuffer(encOutputIndex, false);
            if (n>=NB_ENCODED) {
                flushMediaCodec(mEncoder);
                return elapsed;
            }
        }

        elapsed = timestamp() - now;
    }

    throw new RuntimeException("The encoder is too slow.");

}
Project: Android_CCTV    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: Android_CCTV    File: EncoderDebugger.java
private long encode() {
    int n = 0;
    long elapsed = 0, now = timestamp();
    int encOutputIndex = 0, encInputIndex = 0;
    BufferInfo info = new BufferInfo();
    ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

    while (elapsed<5000000) {
        // Feeds the encoder with an image
        encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
        if (encInputIndex>=0) {
            check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
            encInputBuffers[encInputIndex].clear();
            encInputBuffers[encInputIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE) Log.d(TAG,"No buffer available !");
        }

        // Tries to get a NAL unit
        encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encOutputIndex>=0) {
            mVideo[n] = new byte[info.size];
            encOutputBuffers[encOutputIndex].clear();
            encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
            mEncoder.releaseOutputBuffer(encOutputIndex, false);
            if (n>=NB_ENCODED) {
                flushMediaCodec(mEncoder);
                return elapsed;
            }
        }

        elapsed = timestamp() - now;
    }

    throw new RuntimeException("The encoder is too slow.");

}
Project: VideoMeeting    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: Endoscope    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: Endoscope    File: EncoderDebugger.java
private long encode() {
    int n = 0;
    long elapsed = 0, now = timestamp();
    int encOutputIndex = 0, encInputIndex = 0;
    BufferInfo info = new BufferInfo();
    ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

    while (elapsed<5000000) {
        // Feeds the encoder with an image
        encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
        if (encInputIndex>=0) {
            check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
            encInputBuffers[encInputIndex].clear();
            encInputBuffers[encInputIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE) Log.d(TAG,"No buffer available !");
        }

        // Tries to get a NAL unit
        encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encOutputIndex>=0) {
            mVideo[n] = new byte[info.size];
            encOutputBuffers[encOutputIndex].clear();
            encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
            mEncoder.releaseOutputBuffer(encOutputIndex, false);
            if (n>=NB_ENCODED) {
                flushMediaCodec(mEncoder);
                return elapsed;
            }
        }

        elapsed = timestamp() - now;
    }

    throw new RuntimeException("The encoder is too slow.");

}
Project: RemoteEye    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: RemoteEye    File: EncoderDebugger.java
private long encode() {
    int n = 0;
    long elapsed = 0, now = timestamp();
    int encOutputIndex = 0, encInputIndex = 0;
    BufferInfo info = new BufferInfo();
    ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

    while (elapsed<5000000) {
        // Feeds the encoder with an image
        encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
        if (encInputIndex>=0) {
            check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
            encInputBuffers[encInputIndex].clear();
            encInputBuffers[encInputIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE) Log.d(TAG,"No buffer available !");
        }

        // Tries to get a NAL unit
        encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encOutputIndex>=0) {
            mVideo[n] = new byte[info.size];
            encOutputBuffers[encOutputIndex].clear();
            encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
            mEncoder.releaseOutputBuffer(encOutputIndex, false);
            if (n>=NB_ENCODED) {
                flushMediaCodec(mEncoder);
                return elapsed;
            }
        }

        elapsed = timestamp() - now;
    }

    throw new RuntimeException("The encoder is too slow.");

}
Project: libstreaming_android_studio    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: libstreaming_android_studio    File: EncoderDebugger.java
private long encode() {
    int n = 0;
    long elapsed = 0, now = timestamp();
    int encOutputIndex = 0, encInputIndex = 0;
    BufferInfo info = new BufferInfo();
    ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

    while (elapsed<5000000) {
        // Feeds the encoder with an image
        encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
        if (encInputIndex>=0) {
            check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
            encInputBuffers[encInputIndex].clear();
            encInputBuffers[encInputIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE) Log.d(TAG,"No buffer available !");
        }

        // Tries to get a NAL unit
        encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encOutputIndex>=0) {
            mVideo[n] = new byte[info.size];
            encOutputBuffers[encOutputIndex].clear();
            encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
            mEncoder.releaseOutputBuffer(encOutputIndex, false);
            if (n>=NB_ENCODED) {
                flushMediaCodec(mEncoder);
                return elapsed;
            }
        }

        elapsed = timestamp() - now;
    }

    throw new RuntimeException("The encoder is too slow.");

}
Project: JJCamera    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: JJCamera    File: EncoderDebugger.java
private long encode() {
    int n = 0;
    long elapsed = 0, now = timestamp();
    int encOutputIndex = 0, encInputIndex = 0;
    BufferInfo info = new BufferInfo();
    ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

    while (elapsed<5000000) {
        // Feeds the encoder with an image
        encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
        if (encInputIndex>=0) {
            check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
            encInputBuffers[encInputIndex].clear();
            encInputBuffers[encInputIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE) Log.d(TAG,"No buffer available !");
        }

        // Tries to get a NAL unit
        encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encOutputIndex>=0) {
            mVideo[n] = new byte[info.size];
            encOutputBuffers[encOutputIndex].clear();
            encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
            mEncoder.releaseOutputBuffer(encOutputIndex, false);
            if (n>=NB_ENCODED) {
                flushMediaCodec(mEncoder);
                return elapsed;
            }
        }

        elapsed = timestamp() - now;
    }

    throw new RuntimeException("The encoder is too slow.");

}
Project: binea_project_for_android    File: VideoEncoderFromBuffer.java
@SuppressLint("NewApi")
public VideoEncoderFromBuffer(int width, int height) {
    Log.i(TAG, "VideoEncoder()");
    this.mWidth = width;
    this.mHeight = height;
    mFrameData = new byte[this.mWidth * this.mHeight * 3 / 2];

    mBufferInfo = new MediaCodec.BufferInfo();
    MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
    if (codecInfo == null) {
        // Don't fail CTS if they don't have an AVC codec (not here,
        // anyway).
        Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
        return;
    }
    if (VERBOSE)
        Log.d(TAG, "found codec: " + codecInfo.getName());
    mColorFormat = selectColorFormat(codecInfo, MIME_TYPE);
    if (VERBOSE)
        Log.d(TAG, "found colorFormat: " + mColorFormat);
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE,
            this.mWidth, this.mHeight);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
            IFRAME_INTERVAL);
    if (VERBOSE)
        Log.d(TAG, "format: " + mediaFormat);
    mMediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
    mMediaCodec.configure(mediaFormat, null, null,
            MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();

    String fileName = DEBUG_FILE_NAME_BASE + this.mWidth + "x"
            + this.mHeight + ".mp4";
    mFileName = fileName;
    Log.i(TAG, "videofile: " + fileName);
    // try {
    // mFileOutputStream = new FileOutputStream(fileName);
    // } catch (IOException e) {
    // System.out.println(e);
    // } catch (Exception e) {
    // System.out.println(e);
    // }

    mStartTime = System.nanoTime();

    // Create a MediaMuxer.  We can't add the video track and start() the muxer here,
    // because our MediaFormat doesn't have the Magic Goodies.  These can only be
    // obtained from the encoder after it has started processing data.
    //
    // We're not actually interested in multiplexing audio.  We just want to convert
    // the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
    try {
        mMuxer = new MediaMuxer(fileName,
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException ioe) {
        throw new RuntimeException("MediaMuxer creation failed", ioe);
    }
    mTrackIndex = -1;
    mMuxerStarted = false;
}
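The "Magic Goodies" mentioned in the comment are the encoder's real output MediaFormat, which becomes available once dequeueOutputBuffer() returns INFO_OUTPUT_FORMAT_CHANGED; only then can the track be added and the muxer started. The following sketch of that step uses the fields declared above but is not copied from the project.

// Sketch: start the muxer once the encoder reports its actual output format.
private void maybeStartMuxer(int outputBufferIndex) {
    if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED && !mMuxerStarted) {
        MediaFormat newFormat = mMediaCodec.getOutputFormat();
        mTrackIndex = mMuxer.addTrack(newFormat);
        mMuxer.start();
        mMuxerStarted = true;
    }
}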
Project: OoDroid2    File: MediaCodecInputStream.java
public BufferInfo getLastBufferInfo() {
    return mBufferInfo;
}
Project: OoDroid2    File: EncoderDebugger.java
private long encode() {
    int n = 0;
    long elapsed = 0, now = timestamp();
    int encOutputIndex = 0, encInputIndex = 0;
    BufferInfo info = new BufferInfo();
    ByteBuffer[] encInputBuffers = mEncoder.getInputBuffers();
    ByteBuffer[] encOutputBuffers = mEncoder.getOutputBuffers();

    while (elapsed<5000000) {
        // Feeds the encoder with an image
        encInputIndex = mEncoder.dequeueInputBuffer(1000000/FRAMERATE);
        if (encInputIndex>=0) {
            check(encInputBuffers[encInputIndex].capacity()>=mData.length, "The input buffer is not big enough.");
            encInputBuffers[encInputIndex].clear();
            encInputBuffers[encInputIndex].put(mData, 0, mData.length);
            mEncoder.queueInputBuffer(encInputIndex, 0, mData.length, timestamp(), 0);
        } else {
            if (VERBOSE) Log.d(TAG,"No buffer available !");
        }

        // Tries to get a NAL unit
        encOutputIndex = mEncoder.dequeueOutputBuffer(info, 1000000/FRAMERATE);
        if (encOutputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            encOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encOutputIndex>=0) {
            mVideo[n] = new byte[info.size];
            encOutputBuffers[encOutputIndex].clear();
            encOutputBuffers[encOutputIndex].get(mVideo[n++], 0, info.size);
            mEncoder.releaseOutputBuffer(encOutputIndex, false);
            if (n>=NB_ENCODED) {
                flushMediaCodec(mEncoder);
                return elapsed;
            }
        }

        elapsed = timestamp() - now;
    }

    throw new RuntimeException("The encoder is too slow.");

}