Java 类 android.media.MediaCodec 实例源码

项目:AndroidTvDemo    文件:ExtractorRendererBuilder.java   
/**
 * Builds the video, audio and text renderers for extractor-based (progressive)
 * playback and delivers them to the player via onRenderers().
 */
@Override
public void buildRenderers(DemoPlayer player) {
  // Shared allocator for sample data and the handler used for event delivery.
  Allocator bufferAllocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
  Handler eventHandler = player.getMainHandler();

  // Source chain: bandwidth-metered URI data source feeding an extractor sample source.
  DefaultBandwidthMeter meter = new DefaultBandwidthMeter(eventHandler, null);
  DataSource source = new DefaultUriDataSource(context, meter, userAgent);
  ExtractorSampleSource samples = new ExtractorSampleSource(uri, source, bufferAllocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, eventHandler, player, 0);

  // Video, audio and text renderers all draw from the same sample source.
  MediaCodecVideoTrackRenderer video = new MediaCodecVideoTrackRenderer(context,
      samples, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      eventHandler, player, 50);
  MediaCodecAudioTrackRenderer audio = new MediaCodecAudioTrackRenderer(samples,
      MediaCodecSelector.DEFAULT, null, true, eventHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  TrackRenderer text = new TextTrackRenderer(samples, player, eventHandler.getLooper());

  // Hand the assembled renderer array back to the player.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = video;
  renderers[DemoPlayer.TYPE_AUDIO] = audio;
  renderers[DemoPlayer.TYPE_TEXT] = text;
  player.onRenderers(renderers, meter);
}
项目:Hotspot-master-devp    文件:AudioTrackTranscoder.java   
/**
 * Pulls one decoded output buffer from the audio decoder and forwards it to
 * the audio channel, translating MediaCodec's special return codes.
 *
 * @param timeoutUs timeout passed to dequeueOutputBuffer, in microseconds
 * @return DRAIN_STATE_NONE when nothing was consumed (EOS already reached or
 *         the codec asked to try again later),
 *         DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY after a format/buffers change,
 *         DRAIN_STATE_CONSUMED otherwise
 */
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
            // Intentional fall-through: a format change should also be retried immediately.
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        // The sentinel index tells the channel that no real buffer follows.
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
项目:ShaddockVideoPlayer    文件:ExtractorRendererBuilder.java   
/**
 * Builds the video, audio and text renderers for extractor-based playback and
 * delivers them, along with the bandwidth meter, via player.onRenderers().
 */
@Override
public void buildRenderers(DemoPlayer player) {
  Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
  Handler mainHandler = player.getMainHandler();

  // Build the video and audio renderers.
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, null);
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, mainHandler, player, 0);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
项目:AAVT    文件:Mp4Provider.java   
/**
 * Opens the media, configures a video decoder that renders onto the given
 * SurfaceTexture, and starts the decode thread.
 *
 * @param surface texture that will receive decoded video frames
 * @return (0,0) when media extraction fails, otherwise mVideoSize
 */
@Override
public Point open(SurfaceTexture surface) {
    try {
        if(!extractMedia()){
            return new Point(0,0);
        }
        mFrameSem=new Semaphore(0);
        mDecodeSem=new Semaphore(1);
        videoProvideEndFlag=false;
        isUserWantToStop=false;
        mAudioEncodeTrack=mStore.addTrack(mExtractor.getTrackFormat(mAudioDecodeTrack));
        MediaFormat format=mExtractor.getTrackFormat(mVideoDecodeTrack);
        mVideoDecoder = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
        mVideoDecoder.configure(format,new Surface(surface),null,0);
        mVideoDecoder.start();
        startDecodeThread();
    } catch (IOException e) {
        // NOTE(review): decoder-creation failure is only logged; the method still
        // falls through and returns mVideoSize as if it succeeded — confirm
        // callers can cope with a non-started decoder.
        e.printStackTrace();
    }
    return mVideoSize;
}
项目:AAVT    文件:SurfaceEncoder.java   
/**
 * Lazily creates, configures and starts the video encoder, routing its input
 * through a Surface obtained from createInputSurface().
 */
private void openVideoEncoder(){
    AvLog.d(TAG,"openVideoEncoder startTime-->");
    if(mVideoEncoder==null){
        try {
            MediaFormat format=convertVideoConfigToFormat(mConfig.mVideo);
            mVideoEncoder= MediaCodec.createEncoderByType(mConfig.mVideo.mime);
            mVideoEncoder.configure(format,null,null,MediaCodec.CONFIGURE_FLAG_ENCODE);
            // The input surface must be created after configure() and before start().
            super.setSurface(mVideoEncoder.createInputSurface());
            super.setOutputSize(mConfig.mVideo.width,mConfig.mVideo.height);
            mVideoEncoder.start();
            isEncodeStarted=true;
        } catch (IOException e) {
            // NOTE(review): failure is swallowed after logging; isEncodeStarted
            // stays false — confirm callers check that flag before encoding.
            e.printStackTrace();
        }
    }
    AvLog.d(TAG,"openVideoEncoder endTime-->");
}
项目:live_master    文件:SrsMp4Muxer.java   
/**
 * Wraps one encoded sample in an SrsEsFrame and, while recording is active,
 * queues it for the writer thread. Until the first key frame is seen, all
 * non-key frames are dropped so the recording starts on a key frame.
 */
private void writeFrameByte(int track, ByteBuffer bb, MediaCodec.BufferInfo bi, boolean isKeyFrame) {
    SrsEsFrame frame = new SrsEsFrame();
    frame.bb = bb;
    frame.bi = bi;
    frame.isKeyFrame = isKeyFrame;
    frame.track = track;

    if (!bRecording || bPaused) {
        return;
    }
    if (needToFindKeyFrame) {
        // Still waiting for the first key frame; drop anything else.
        if (!frame.isKeyFrame) {
            return;
        }
        needToFindKeyFrame = false;
    }
    frameCache.add(frame);
    // Wake the writer thread that waits on writeLock for new frames.
    synchronized (writeLock) {
        writeLock.notifyAll();
    }
}
项目:libRtmp    文件:AndroidUntil.java   
@TargetApi(21)
    /**
     * Creates and configures a surface-input video encoder from the global
     * Options. Returns null if the encoder cannot be created or configured.
     */
    public static MediaCodec getVideoMediaCodec() {
        int videoWidth = getVideoSize(Options.getInstance().video.width);
        int videoHeight = getVideoSize(Options.getInstance().video.height);
        MediaFormat format = MediaFormat.createVideoFormat(Options.getInstance().video.mime, videoWidth, videoHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        // maxBps is configured in kbit/s; MediaFormat expects bit/s.
        format.setInteger(MediaFormat.KEY_BIT_RATE, Options.getInstance().video.maxBps* 1024);
        int fps = Options.getInstance().video.fps;
        //设置摄像头预览帧率
//        if(BlackListHelper.deviceInFpsBlacklisted()) {
//            SopCastLog.d(SopCastConstant.TAG, "Device in fps setting black list, so set mediacodec fps 15");
//            fps = 15;
//        }
        format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, Options.getInstance().video.ifi);
        format.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
        // FIXME: BITRATE_MODE_CBR is a bitrate-mode constant (value 2), not a
        // complexity level; setting it as KEY_COMPLEXITY almost certainly does
        // not do what was intended. Kept for behavior compatibility.
        format.setInteger(MediaFormat.KEY_COMPLEXITY, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
        MediaCodec mediaCodec = null;

        try {
            mediaCodec = MediaCodec.createEncoderByType(Options.getInstance().video.mime);
            mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        }catch (Exception e) {
            e.printStackTrace();
            if (mediaCodec != null) {
                // Do NOT call stop() here: the codec never reached the Executing
                // state, so stop() would throw IllegalStateException and mask the
                // original failure. release() is valid in any state.
                mediaCodec.release();
                mediaCodec = null;
            }
        }
        return mediaCodec;
    }
项目:AI-Powered-Intelligent-Banking-Platform    文件:EncodedAudioRecorder.java   
/**
 * Copy audio from the recorder into the encoder.
 *
 * @param codec        encoder whose input buffer at {@code index} is filled
 * @param inputBuffers codec input buffer array (pre-Lollipop buffer API)
 * @param index        index of the dequeued input buffer to fill
 * @param speechRecord recorder to read PCM data from
 * @return number of bytes queued, or -1 when not recording, the read failed,
 *         or the platform is older than Jelly Bean
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private int queueInputBuffer(MediaCodec codec, ByteBuffer[] inputBuffers, int index, SpeechRecord speechRecord) {
    if (speechRecord == null || speechRecord.getRecordingState() != SpeechRecord.RECORDSTATE_RECORDING) {
        return -1;
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer inputBuffer = inputBuffers[index];
        inputBuffer.clear();
        int size = inputBuffer.limit();
        byte[] buffer = new byte[size];
        // NOTE(review): only negative statuses are treated as errors; if read()
        // can return fewer bytes than requested, the full `size` is still queued
        // below — confirm read() always fills the buffer.
        int status = read(speechRecord, buffer);
        if (status < 0) {
            handleError("status = " + status);
            return -1;
        }
        inputBuffer.put(buffer);
        // Presentation timestamp 0 and no flags for every queued buffer.
        codec.queueInputBuffer(index, 0, size, 0, 0);
        return size;
    }
    return -1;
}
项目:Fatigue-Detection    文件:MediaEncoder.java   
/**
 * Creates the encoder, registers it with the muxer wrapper and starts the
 * worker thread, blocking until that thread signals readiness via mSync.
 *
 * @param muxer    muxer wrapper this encoder writes to; must not be null
 * @param listener callback for encoder lifecycle events; must not be null
 * @throws NullPointerException if muxer or listener is null
 */
public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
    if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
    if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
    mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
    muxer.addEncoder(this);
    mListener = listener;
    synchronized (mSync) {
        // create BufferInfo here for effectiveness(to reduce GC)
        mBufferInfo = new MediaCodec.BufferInfo();
        // wait for starting thread
        new Thread(this, getClass().getSimpleName()).start();
        try {
            mSync.wait();
        } catch (final InterruptedException e) {
            // Restore the interrupt status instead of silently swallowing it,
            // so callers can still observe that the thread was interrupted.
            Thread.currentThread().interrupt();
        }
    }
}
项目:AI-Powered-Intelligent-Banking-Platform    文件:EncodedAudioRecorder.java   
/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 *
 * @param codec        started-state encoder to drive; stopped and released on exit
 * @param speechRecord audio source drained via queueInputBuffer()
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numRetriesDequeueOutputBuffer = 0;
        int index;
        // Alternates between feeding recorder audio to the encoder and draining
        // encoded output, until EOS is seen or the retry budget is exhausted.
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        // Recorder stopped or read failed: signal end-of-stream.
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
                // Bail out if the encoder keeps producing nothing.
                if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // The output buffer array was invalidated; refresh our reference.
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
    }
}
项目:PlusGram    文件:Track.java   
/**
 * Records one encoded sample: its offset and size, whether it is a sync
 * sample, and its duration derived from the presentation-time delta.
 *
 * @param offset     byte offset of the sample in the output
 * @param bufferInfo codec metadata (size, flags, presentation time) for the sample
 */
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    // Ignore samples whose timestamp goes backwards.
    if (delta < 0) {
        return;
    }
    // Audio tracks have no sync-sample table; for video, use the codec flag.
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        // Sample numbers are 1-based, hence size() after the add above.
        syncSamples.add(samples.size());
    }

    // Convert the microsecond delta to track timescale units, rounding to nearest.
    delta = (delta * timeScale + 500000L) / 1000000L;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    if (!first) {
        // NOTE(review): the duration is inserted one position before the end of
        // the list rather than appended — confirm this offset is intentional.
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
项目:Exoplayer2Radio    文件:MediaCodecRenderer.java   
/**
 * @param trackType The track type that the renderer handles. One of the {@code C.TRACK_TYPE_*}
 *     constants defined in {@link C}.
 * @param mediaCodecSelector A decoder selector.
 * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
 *     media is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 */
public MediaCodecRenderer(int trackType, MediaCodecSelector mediaCodecSelector,
    DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
    boolean playClearSamplesWithoutKeys) {
  super(trackType);
  // MediaCodec is only available from API level 16 (Jelly Bean) onwards.
  Assertions.checkState(Util.SDK_INT >= 16);
  this.mediaCodecSelector = Assertions.checkNotNull(mediaCodecSelector);
  this.drmSessionManager = drmSessionManager;
  this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
  // No buffer replacement: sample data is written directly into codec input buffers.
  buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
  flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
  formatHolder = new FormatHolder();
  decodeOnlyPresentationTimestamps = new ArrayList<>();
  outputBufferInfo = new MediaCodec.BufferInfo();
  codecReconfigurationState = RECONFIGURATION_STATE_NONE;
  codecReinitializationState = REINITIALIZATION_STATE_NONE;
}
项目:PeSanKita-android    文件:AudioCodec.java   
/**
 * Drains all currently-available encoder output buffers, writing each
 * non-config frame to the output stream prefixed with an ADTS header.
 * Returns once the codec reports INFO_TRY_AGAIN_LATER (no output ready).
 *
 * @param mediaCodec         the encoder being drained
 * @param codecOutputBuffers output buffer array; refreshed locally when the
 *                           codec reports INFO_OUTPUT_BUFFERS_CHANGED
 * @param bufferInfo         reused metadata holder for each dequeued buffer
 * @param outputStream       destination for the ADTS-framed audio
 * @throws IOException if writing to the output stream fails
 */
private void handleCodecOutput(MediaCodec mediaCodec,
                               ByteBuffer[] codecOutputBuffers,
                               MediaCodec.BufferInfo bufferInfo,
                               OutputStream outputStream)
    throws IOException
{
  int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

  while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
    if (codecOutputBufferIndex >= 0) {
      ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];

      // Restrict the view to the valid region reported by the codec.
      encoderOutputBuffer.position(bufferInfo.offset);
      encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);

      // Skip codec-config buffers: they carry setup data, not audio payload.
      if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
        byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);


        outputStream.write(header);

        byte[] data = new byte[encoderOutputBuffer.remaining()];
        encoderOutputBuffer.get(data);
        outputStream.write(data);
      }

      encoderOutputBuffer.clear();

      mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
    }  else if (codecOutputBufferIndex== MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      // Buffer array invalidated; only the local reference is updated here.
      codecOutputBuffers = mediaCodec.getOutputBuffers();
    }

    codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
  }

}
项目:AI-Powered-Intelligent-Banking-Platform    文件:EncodedAudioRecorder.java   
/**
 * Save the encoded (output) buffer into the complete encoded recording.
 * TODO: copy directly (without the intermediate byte array)
 *
 * @param codec         encoder owning the buffer; it is always released here
 * @param outputBuffers codec output buffer array (pre-Lollipop API)
 * @param index         index of the dequeued output buffer
 * @param info          codec metadata describing the valid region
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void dequeueOutputBuffer(MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer buffer = outputBuffers[index];
        Log.i("size/remaining: " + info.size + "/" + buffer.remaining());
        if (info.size <= buffer.remaining()) {
            final byte[] bufferCopied = new byte[info.size];
            buffer.get(bufferCopied); // TODO: catch BufferUnderflow
            // TODO: do we need to clear?
            // on N5: always size == remaining(), clearing is not needed
            // on SGS2: remaining decreases until it becomes less than size, which results in BufferUnderflow
            // (but SGS2 records only zeros anyway)
            //buffer.clear();
            codec.releaseOutputBuffer(index, false);
            addEncoded(bufferCopied);
            if (Log.DEBUG) {
                AudioUtils.showSomeBytes("out", bufferCopied);
            }
        } else {
            // Device-specific anomaly (see notes above): drop the buffer rather
            // than risk a BufferUnderflowException.
            Log.e("size > remaining");
            codec.releaseOutputBuffer(index, false);
        }
    }
}
项目:Fatigue-Detection    文件:AudioEncoderCore.java   
/**
 * Configures and starts a mono AAC-LC audio encoder and launches the audio
 * capture thread.
 *
 * @param MMediaMuxer muxer wrapper passed through to the superclass
 * @throws IOException if the encoder cannot be created
 */
public AudioEncoderCore(MMediaMuxer MMediaMuxer) throws IOException {
        super(MMediaMuxer);
        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
//      audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
//      audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
        if (VERBOSE) Log.i(TAG, "format: " + audioFormat);
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mEncoder.start();
        // Start the capture thread only once; flags gate the capture loop.
        if (mAudioThread == null) {
            mAudioThread = new AudioThread();
            mAudioThread.start();
            capturing=true;
            stopped=false;
        }
    }
项目:PlusGram    文件:VideoPlayer.java   
/**
 * Builds video and audio renderers for extractor-based playback and delivers
 * them via player.onRenderers().
 */
@Override
public void buildRenderers(VideoPlayer player) {
    Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
    Handler mainHandler = player.getMainHandler();

    TrackRenderer[] renderers = new TrackRenderer[RENDERER_COUNT];
    DataSource dataSource = new DefaultUriDataSource(context, userAgent);
    ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator, BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, mainHandler, null, 0);
    // The previous anonymous subclass overrode doSomeWork() only to call
    // super, which is a no-op override; a plain instance behaves identically.
    renderers[TYPE_VIDEO] = new MediaCodecVideoTrackRenderer(context, sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50);
    renderers[TYPE_AUDIO] = new MediaCodecAudioTrackRenderer(sampleSource, MediaCodecSelector.DEFAULT, null, true, mainHandler, null, AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
    player.onRenderers(renderers);
}
项目:AndroidRTC    文件:MediaCodecVideoEncoder.java   
/**
 * Asks the encoder for a key frame "soon" when one was explicitly requested,
 * or when the configured forced-key-frame interval has elapsed.
 */
void checkKeyFrameRequired(boolean requestedKeyFrame, long presentationTimestampUs) {
  // Round the microsecond timestamp to the nearest millisecond.
  long nowMs = (presentationTimestampUs + 500) / 1000;
  if (lastKeyFrameMs < 0) {
    lastKeyFrameMs = nowMs;
  }
  // Force a key frame once the configured interval has passed without one.
  boolean forcedKeyFrame = !requestedKeyFrame
      && forcedKeyFrameMs > 0
      && nowMs > lastKeyFrameMs + forcedKeyFrameMs;
  if (requestedKeyFrame || forcedKeyFrame) {
    // Ideally MediaCodec would honor BUFFER_FLAG_SYNC_FRAME so we could
    // indicate this in queueInputBuffer() below and guarantee _this_ frame
    // be encoded as a key frame, but sadly that flag is ignored.  Instead,
    // we request a key frame "soon".
    if (requestedKeyFrame) {
      Logging.d(TAG, "Sync frame request");
    } else {
      Logging.d(TAG, "Sync frame forced");
    }
    Bundle params = new Bundle();
    params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
    mediaCodec.setParameters(params);
    lastKeyFrameMs = nowMs;
  }
}
项目:VideoApplication    文件:AudioDecoder.java   
/**
 * Plays one decoded audio buffer through the AudioTrack, pacing output via
 * adjustPresentationTime(), then releases the buffer back to the codec.
 *
 * @param outputBufIndex index of the dequeued output buffer
 * @param bufferInfo     codec metadata (size, flags, presentation time)
 */
@Override
protected void output(int outputBufIndex, MediaCodec.BufferInfo bufferInfo) {
    if (mMediaCodec == null) return;
    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        Log.d(TAG, "output EOS");
    }
    ByteBuffer buf;
    // Lollipop added per-index buffer access; fall back to the legacy array.
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
        buf = mMediaCodec.getOutputBuffer(outputBufIndex);
    } else {
        buf = mMediaCodec.getOutputBuffers()[outputBufIndex];
    }
    final byte[] chunk = new byte[bufferInfo.size];
    buf.get(chunk); // Read the buffer all at once
    buf.clear(); // ** MUST DO!!! OTHERWISE THE NEXT TIME YOU GET THIS SAME BUFFER BAD THINGS WILL HAPPEN
    if (chunk.length > 0) {
        mAudioTrack.write(chunk, 0, chunk.length);
        // Scale the timestamp by the current playback rate before pacing.
        mStartTime = adjustPresentationTime(mStartTime, (long) ((double)bufferInfo.presentationTimeUs/mWeakPlayer.get().getPlayRate()));
    }
    mMediaCodec.releaseOutputBuffer(outputBufIndex, false);

}
项目:AndroidInstantVideo    文件:TestAudioEncoder.java   
/**
 * Reads all available AAC data from the encoder's input stream and writes it
 * to `os`, inserting a 7-byte ADTS packet header at the start of each encoded
 * packet (i.e. whenever a new bufferInfo-sized packet begins).
 */
public void write() {
    MediaCodecInputStream mediaCodecInputStream = aacEncoder.getMediaCodecInputStream();
    MediaCodecInputStream.readAll(mediaCodecInputStream, writeBuffer, new MediaCodecInputStream.OnReadAllCallback() {
        boolean shouldAddPacketHeader = true;
        byte[] header = new byte[7];
        @Override
        public void onReadOnce(byte[] buffer, int readSize, MediaCodec.BufferInfo bufferInfo) {
            if (readSize <= 0) {
                return;
            }
            try {
                Loggers.d("TestAudioEncoder", String.format("onReadOnce: readSize:%d, bufferInfo:%d", readSize, bufferInfo.size));
                if (shouldAddPacketHeader) {
                    Loggers.d("TestAudioEncoder", String.format("onReadOnce: add packet header"));
                    // ADTS frame length covers the 7-byte header plus the payload.
                    AACEncoder.addADTStoPacket(header, 7 + bufferInfo.size);
                    os.write(header);
                }
                os.write(buffer, 0, readSize);
                os.flush();
            } catch (IOException e) {
                e.printStackTrace();
            }
            // A packet was fully consumed when this read reached bufferInfo.size,
            // so the next read starts a new packet and needs a new header.
            shouldAddPacketHeader = readSize >= bufferInfo.size;
        }
    });
}
项目:PlusGram    文件:MediaCodecTrackRenderer.java   
/**
 * @param sources The upstream sources from which the renderer obtains samples.
 * @param mediaCodecSelector A decoder selector.
 * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
 *     media is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
 *     null if delivery of events is not required.
 * @param eventListener A listener of events. May be null if delivery of events is not required.
 */
public MediaCodecTrackRenderer(SampleSource[] sources, MediaCodecSelector mediaCodecSelector,
    DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
    boolean playClearSamplesWithoutKeys, Handler eventHandler, EventListener eventListener) {
  super(sources);
  // MediaCodec is only available from API level 16 (Jelly Bean) onwards.
  Assertions.checkState(Util.SDK_INT >= 16);
  this.mediaCodecSelector = Assertions.checkNotNull(mediaCodecSelector);
  this.drmSessionManager = drmSessionManager;
  this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
  this.eventHandler = eventHandler;
  this.eventListener = eventListener;
  deviceNeedsAutoFrcWorkaround = deviceNeedsAutoFrcWorkaround();
  codecCounters = new CodecCounters();
  // No buffer replacement: sample data is written directly into codec buffers.
  sampleHolder = new SampleHolder(SampleHolder.BUFFER_REPLACEMENT_MODE_DISABLED);
  formatHolder = new MediaFormatHolder();
  decodeOnlyPresentationTimestamps = new ArrayList<>();
  outputBufferInfo = new MediaCodec.BufferInfo();
  codecReconfigurationState = RECONFIGURATION_STATE_NONE;
  codecReinitializationState = REINITIALIZATION_STATE_NONE;
}
项目:EZFilter    文件:MediaEncoder.java   
/**
 * Queues one chunk of input data into the encoder, retrying (in TIMEOUT_USEC
 * slices) until an input buffer becomes available or capturing stops.
 *
 * @param buffer             data to encode; may be null when signalling EOS
 * @param length             number of valid bytes; zero or less signals EOS
 * @param presentationTimeUs presentation timestamp in microseconds
 */
protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
    if (!mIsCapturing) return;
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    while (mIsCapturing) {
        final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (inputBufferIndex >= 0) {
            final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            if (buffer != null) {
                inputBuffer.put(buffer);
            }
            if (length <= 0) {
                // send EOS
                mIsEOS = true;
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                        presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                break;
            } else {
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
                        presentationTimeUs, 0);
            }
            break;
        } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // wait for MediaCodec encoder is ready to encode
            // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
            // will wait for maximum TIMEOUT_USEC(10msec) on each call
        }
    }
}
项目:Fatigue-Detection    文件:EncoderCore.java   
/**
 * Method to set byte array to the MediaCodec encoder.
 *
 * @param buffer             data to encode; may be null when signalling EOS
 * @param length             length of byte array, zero means EOS.
 * @param presentationTimeUs presentation timestamp in microseconds
 */
protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
    Log.d(TAG, "encode: " + this.getClass().getSimpleName());
    final ByteBuffer[] inputBuffers = mEncoder.getInputBuffers();
    // Retry until an input buffer is available, then queue the data once.
    while (true) {
        final int inputBufferIndex = mEncoder.dequeueInputBuffer(TIMEOUT_USEC);
        Log.d(TAG, "encode: " + this.getClass().getSimpleName() + " " + inputBufferIndex);
        if (inputBufferIndex >= 0) {
            final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            if (buffer != null) {
                inputBuffer.put(buffer);
            }
            if (length <= 0) {
                // send EOS
                if (VERBOSE) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                mEncoder.queueInputBuffer(inputBufferIndex, 0, 0,
                        presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                mEncoder.queueInputBuffer(inputBufferIndex, 0, length,
                        presentationTimeUs, 0);
            }
            break;
        } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // Nothing to do: dequeueInputBuffer(TIMEOUT_USEC) already waits up
            // to TIMEOUT_USEC (10 msec) on each call before returning.
        }
    }
}
项目:rtmp-rtsp-stream-client-java    文件:FromFileBase.java   
/**
 * Receives one encoded H.264 buffer: records it to the muxer (starting only
 * once the first key frame is seen) and always forwards it to the RTP path.
 */
@Override
public void getH264Data(ByteBuffer h264Buffer, MediaCodec.BufferInfo info) {
  if (recording) {
    // flags is a bit mask; test the key-frame bit instead of comparing with
    // ==, which would miss key frames that also carry other flags set.
    if ((info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) canRecord = true;
    if (canRecord) {
      mediaMuxer.writeSampleData(videoTrack, h264Buffer, info);
    }
  }
  getH264DataRtp(h264Buffer, info);
}
项目:AndroidInstantVideo    文件:MP4Muxer.java   
/**
 * Forwards one encoded audio frame to the frame sender with a normalized
 * timestamp, ensuring the audio track is registered with the muxer first.
 */
@Override
public void writeAudio(byte[] buffer, int offset, int length, MediaCodec.BufferInfo bufferInfo) {
    super.writeAudio(buffer, offset, length, bufferInfo);
    addTrackAndReadyToStart(FramePool.Frame.TYPE_AUDIO);

    Loggers.d(TAG, "writeAudio: ");
    frameSender.sendAddFrameMessage(buffer, offset, length, new BufferInfoEx(bufferInfo, audioTimeIndexCounter.getTimeIndex()), FramePool.Frame.TYPE_AUDIO);
}
项目:AAVT    文件:CameraRecorder.java   
/**
 * Returns the codec input buffer for the given index, using the per-index
 * accessor on Lollipop+ and the legacy buffer array on older platforms.
 */
private ByteBuffer getInputBuffer(MediaCodec codec, int index){
    return Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP
            ? codec.getInputBuffer(index)
            : codec.getInputBuffers()[index];
}
项目:live_master    文件:SrsEncoder.java   
/**
 * Writes one encoded Annex-B video frame to both the MP4 and FLV muxers.
 * The buffer is duplicated for the MP4 muxer so each muxer consumes its own
 * independent position/limit.
 */
private void onEncodedAnnexbFrame(ByteBuffer es, MediaCodec.BufferInfo bi) {
    try {
        ByteBuffer record = es.duplicate();
        mp4Muxer.writeSampleData(videoMp4Track, record, bi);
        flvMuxer.writeSampleData(videoFlvTrack, es, bi);
    } catch (Exception e) {
        Log.e(TAG, "muxer write video sample failed.");
        e.printStackTrace();
    }
}
项目:ScreenRecordCaptureMaster    文件:YXMuxerWrapper.java   
/**
 * Writes one encoded sample to the muxer, but only after the muxer has been
 * started and while it is non-null.
 */
public synchronized void writeMediaData(int trackIndex, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
    // Samples arriving before the muxer starts are dropped.
    if (!mMuxerStarted) {
        return;
    }
    RecordScreenLogUtil.e(TAG, "当前线程::::" + trackIndex);
    if (mMuxer != null) {
        mMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
    }
}
项目:AAVT    文件:Mp4Provider.java   
/**
 * Drains audio samples from the extractor into the store until the audio
 * timeline passes mVideoStopTimeStamp or the track runs out of samples.
 * The sample that crosses the stop time (or the empty end-of-track sample)
 * is flagged BUFFER_FLAG_END_OF_STREAM for downstream consumers.
 *
 * @return true once the audio timeline has reached/passed the video stop time
 *         (always true when the track is exhausted); false only if audio is
 *         disabled or the loop somehow exits without reaching the stop time.
 *
 * NOTE(review): the same scratch ByteBuffer instance is passed to every
 * HardMediaData added to the store, and it is overwritten by the next
 * readSampleData — if the store does not copy the data synchronously,
 * queued samples will be corrupted. Verify HardMediaData/store semantics.
 */
private boolean audioDecodeStep(){
    ByteBuffer buffer=ByteBuffer.allocate(1024*64);  // 64 KiB scratch buffer, reused for every sample
    boolean isTimeEnd=false;
    if(isOpenAudio){
        buffer.clear();
        mExtractor.selectTrack(mAudioDecodeTrack);
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int length=mExtractor.readSampleData(buffer,0);
            if(length!=-1){
                int flags=mExtractor.getSampleFlags();
                // Audio is considered finished once its timestamp passes the video's stop time.
                boolean isAudioEnd=mExtractor.getSampleTime()>mVideoStopTimeStamp;
                info.size=length;
                // The final forwarded sample carries EOS instead of its real extractor flags.
                info.flags=isAudioEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:flags;
                info.presentationTimeUs=mExtractor.getSampleTime();
                info.offset=0;
                AvLog.d(tag,"audio sampleTime= "+info.presentationTimeUs+"/"+mVideoStopTimeStamp);
                isTimeEnd=mExtractor.getSampleTime()>mVideoStopTimeStamp;
                AvLog.d(tag,"is End= "+isAudioEnd );
                mStore.addData(mAudioEncodeTrack,new HardMediaData(buffer,info));
                if(isAudioEnd){
                    break;
                }
            }else{
                // Track exhausted (readSampleData returned -1): forward an empty EOS sample.
                AvLog.d(tag,"is End= "+true );
                info.size=0;
                info.flags=MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                mStore.addData(mAudioEncodeTrack,new HardMediaData(buffer,info));
                isTimeEnd=true;
                break;
            }
            mExtractor.advance();
        }
    }
    return isTimeEnd;
}
项目:EasyScreenRecorder    文件:MediaEncoder.java   
/**
 * Feeds one chunk of raw data into the MediaCodec encoder's input queue.
 * @param buffer source data to copy into the codec input buffer; may be null
 * @param length number of valid bytes; zero (or negative) signals end-of-stream
 * @param presentationTimeUs presentation timestamp in microseconds
 */
    protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
        if (!mIsCapturing) return;
        final ByteBuffer[] codecInputs = mMediaCodec.getInputBuffers();
        while (mIsCapturing) {
            final int index = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
            if (index < 0) {
                // INFO_TRY_AGAIN_LATER (or another negative status): the codec
                // isn't ready. dequeueInputBuffer already blocked for up to
                // TIMEOUT_USEC, so just loop and try again.
                continue;
            }
            final ByteBuffer input = codecInputs[index];
            input.clear();
            if (buffer != null) {
                input.put(buffer);
            }
            if (length <= 0) {
                // Zero length means EOS: queue an empty buffer flagged END_OF_STREAM.
                mIsEOS = true;
                if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                mMediaCodec.queueInputBuffer(index, 0, 0,
                    presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            } else {
                mMediaCodec.queueInputBuffer(index, 0, length,
                    presentationTimeUs, 0);
            }
            // One buffer queued per call — done.
            break;
        }
    }
项目:libRtmp    文件:AndroidUntil.java   
/**
 * Builds and configures an audio encoder from the global {@code Options}
 * (mime, sample rate, channel count, bitrate, max input size).
 *
 * @return a configured (not started) MediaCodec encoder, or null on failure.
 */
@TargetApi(18)
public static MediaCodec getAudioMediaCodec(){
    MediaFormat format = MediaFormat.createAudioFormat(
            Options.getInstance().audio.mime,
            Options.getInstance().audio.frequency,
            Options.getInstance().audio.channelCount);
    // The AAC profile key only applies to the default (AAC) mime type.
    if(Options.getInstance().audio.mime.equals(Options.DEFAULT_MIME)) {
        format.setInteger(MediaFormat.KEY_AAC_PROFILE, Options.getInstance().audio.aacProfile);
    }
    format.setInteger(MediaFormat.KEY_BIT_RATE, Options.getInstance().audio.maxBps * 1024);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, Options.getInstance().audio.frequency);
    int maxInputSize = getRecordBufferSize();
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, Options.getInstance().audio.channelCount);

    MediaCodec mediaCodec = null;
    try {
        mediaCodec = MediaCodec.createEncoderByType(Options.getInstance().audio.mime);
        mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    } catch (Exception e) {
        e.printStackTrace();
        if (mediaCodec != null) {
            // FIX: the codec was never started here, so stop() would throw
            // IllegalStateException out of this catch block. release() alone
            // is valid in any codec state; guard it anyway for best-effort cleanup.
            try {
                mediaCodec.release();
            } catch (Exception ignored) {
                // best-effort cleanup only
            }
            mediaCodec = null;
        }
    }
    return mediaCodec;
}
项目:Exoplayer2Radio    文件:MediaCodecAudioRenderer.java   
/**
 * Handles one decoder output buffer: drops codec-config buffers in
 * passthrough mode, skips buffers when requested, otherwise offers the data
 * to the AudioTrack and releases the buffer once it has been fully consumed.
 *
 * @return true when this buffer is fully handled; false if the AudioTrack
 *         did not consume it yet and it should be offered again later.
 */
@Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
    ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
    boolean shouldSkip) throws ExoPlaybackException {
  if (passthroughEnabled && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
    // Discard output buffers from the passthrough (raw) decoder containing codec specific data.
    codec.releaseOutputBuffer(bufferIndex, false);
    return true;
  }

  if (shouldSkip) {
    codec.releaseOutputBuffer(bufferIndex, false);
    decoderCounters.skippedOutputBufferCount++;
    // A skipped buffer is a gap in the audio stream; tell the track to resync.
    audioTrack.handleDiscontinuity();
    return true;
  }

  try {
    // handleBuffer returns true only once the buffer's contents were fully
    // written to the track; until then we keep the codec buffer and retry.
    if (audioTrack.handleBuffer(buffer, bufferPresentationTimeUs)) {
      codec.releaseOutputBuffer(bufferIndex, false);
      decoderCounters.renderedOutputBufferCount++;
      return true;
    }
  } catch (AudioTrack.InitializationException | AudioTrack.WriteException e) {
    throw ExoPlaybackException.createForRenderer(e, getIndex());
  }
  return false;
}
项目:AndroidRTC    文件:MediaCodecVideoEncoder.java   
/**
 * Updates the encoder's target bitrate and framerate, applying the configured
 * bitrate-adjustment strategy before pushing the adjusted bitrate to the
 * codec via PARAMETER_KEY_VIDEO_BITRATE.
 *
 * @param kbps target bitrate in kilobits per second
 * @param frameRate target frames per second
 * @return true if the codec accepted the new parameters, false otherwise
 *
 * NOTE(review): BITRATE_ADJUSTMENT_FPS * targetBitrateBps below is int
 * arithmetic — presumably safe for realistic bitrates, but confirm it cannot
 * overflow for very high targets.
 */
private boolean setRates(int kbps, int frameRate) {
  checkOnMediaCodecThread();

  int codecBitrateBps = 1000 * kbps;
  if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
    bitrateAccumulatorMax = codecBitrateBps / 8.0;
    if (targetBitrateBps > 0 && codecBitrateBps < targetBitrateBps) {
      // Rescale the accumulator level if the accumulator max decreases
      bitrateAccumulator = bitrateAccumulator * codecBitrateBps / targetBitrateBps;
    }
  }
  targetBitrateBps = codecBitrateBps;
  targetFps = frameRate;

  // Adjust actual encoder bitrate based on bitrate adjustment type.
  if (bitrateAdjustmentType == BitrateAdjustmentType.FRAMERATE_ADJUSTMENT && targetFps > 0) {
    // Scale the bitrate relative to the reference FPS the adjustment assumes.
    codecBitrateBps = BITRATE_ADJUSTMENT_FPS * targetBitrateBps / targetFps;
    Logging.v(TAG,
        "setRates: " + kbps + " -> " + (codecBitrateBps / 1000) + " kbps. Fps: " + targetFps);
  } else if (bitrateAdjustmentType == BitrateAdjustmentType.DYNAMIC_ADJUSTMENT) {
    Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps + ". ExpScale: "
            + bitrateAdjustmentScaleExp);
    if (bitrateAdjustmentScaleExp != 0) {
      // Apply the exponential dynamic-adjustment scale factor.
      codecBitrateBps = (int) (codecBitrateBps * getBitrateScale(bitrateAdjustmentScaleExp));
    }
  } else {
    Logging.v(TAG, "setRates: " + kbps + " kbps. Fps: " + targetFps);
  }

  try {
    Bundle params = new Bundle();
    params.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, codecBitrateBps);
    mediaCodec.setParameters(params);
    return true;
  } catch (IllegalStateException e) {
    // setParameters is only legal in the Executing state; report failure
    // rather than crash the encoder thread.
    Logging.e(TAG, "setRates failed", e);
    return false;
  }
}
项目:Exoplayer2Radio    文件:MediaCodecVideoRenderer.java   
@Override
protected void configureCodec(MediaCodecInfo codecInfo, MediaCodec codec, Format format,
    MediaCrypto crypto) throws DecoderQueryException {
  // Compute max values first; the framework format below is sized from them.
  codecMaxValues = getCodecMaxValues(codecInfo, format, streamFormats);
  MediaFormat frameworkFormat = getMediaFormat(format, codecMaxValues,
      deviceNeedsAutoFrcWorkaround, tunnelingAudioSessionId);
  codec.configure(frameworkFormat, surface, crypto, 0);
  // Tunneled playback relies on frame-rendered callbacks, available from API 23.
  if (Util.SDK_INT >= 23 && tunneling) {
    tunnelingOnFrameRenderedListener = new OnFrameRenderedListenerV23(codec);
  }
}
项目:Exoplayer2Radio    文件:MediaCodecRenderer.java   
/**
 * Builds the framework CryptoInfo for the buffer, folding any adaptive
 * reconfiguration bytes into the clear-byte count of the first sub-sample.
 */
private static MediaCodec.CryptoInfo getFrameworkCryptoInfo(DecoderInputBuffer buffer,
    int adaptiveReconfigurationBytes) {
  MediaCodec.CryptoInfo cryptoInfo = buffer.cryptoInfo.getFrameworkCryptoInfoV16();
  if (adaptiveReconfigurationBytes != 0) {
    // numBytesOfClearData may legitimately be null when the sample has no
    // clear data; materialize a single-entry array before adding to it.
    if (cryptoInfo.numBytesOfClearData == null) {
      cryptoInfo.numBytesOfClearData = new int[1];
    }
    cryptoInfo.numBytesOfClearData[0] += adaptiveReconfigurationBytes;
  }
  return cryptoInfo;
}
项目:19porn    文件:VideoEncoderSurfaceInput.java   
@SuppressLint("NewApi")
@SuppressWarnings("deprecation")
@Override
protected void beforeMediaCodecStart(MediaCodec mediaCodec) {
    // Guard: the codec is created by init(); nothing to do without it.
    if (mediaCodec == null) {
        Log.e(TAG, "Please make sure, init() function is called successed!");
        return;
    }

    // Create the encoder's input surface under lock so concurrent readers
    // observe a fully-assigned field.
    synchronized (this) {
        mEncoderInputSurface = mediaCodec.createInputSurface();
    }
}
项目:EZFilter    文件:VideoTrackTranscoder.java   
/**
 * Drains one output buffer from the video encoder, forwarding the encoded
 * sample to the muxer.
 *
 * @param timeoutUs how long dequeueOutputBuffer may block, in microseconds
 * @return DRAIN_STATE_NONE when nothing was available (or encoder already at
 *         EOS), DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY after a metadata-only
 *         result, DRAIN_STATE_CONSUMED after writing one sample.
 */
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            // The actual output format must arrive exactly once, before any sample.
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Video output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            // Pre-API-21 style: re-fetch the buffer array when it is invalidated.
            mEncoderOutputBuffers = mEncoder.getOutputBuffers();
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        // Zero out the buffer info so an (empty) EOS sample is not muxed with stale size.
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
项目:rtmp-rtsp-stream-client-java    文件:VideoDecoder.java   
/**
 * Creates and configures the video decoder to render onto the given surface.
 *
 * @param surface output surface for decoded frames
 * @return true if the decoder was created and configured, false on any failure
 */
public boolean prepareVideo(Surface surface) {
  try {
    videoDecoder = MediaCodec.createDecoderByType(mime);
    videoDecoder.configure(videoFormat, surface, null, 0);
    return true;
  } catch (IOException | IllegalArgumentException | IllegalStateException e) {
    // FIX: configure() reports failures as runtime exceptions, not IOException;
    // catch them too so a bad format/surface yields false instead of crashing,
    // and release the codec so it isn't leaked half-initialized.
    Log.e(TAG, "Prepare decoder error:", e);
    if (videoDecoder != null) {
      videoDecoder.release();
      videoDecoder = null;
    }
    return false;
  }
}
项目:PlusGram    文件:MediaCodecAudioTrackRenderer.java   
@Override
protected void onOutputFormatChanged(MediaCodec codec, android.media.MediaFormat outputFormat) {
  // In passthrough mode the AudioTrack must be configured from the source
  // (encoded) format; otherwise the codec emits raw PCM and we use its format.
  final android.media.MediaFormat format;
  final String mimeType;
  if (passthroughMediaFormat != null) {
    format = passthroughMediaFormat;
    mimeType = passthroughMediaFormat.getString(android.media.MediaFormat.KEY_MIME);
  } else {
    format = outputFormat;
    mimeType = MimeTypes.AUDIO_RAW;
  }
  int channelCount = format.getInteger(android.media.MediaFormat.KEY_CHANNEL_COUNT);
  int sampleRate = format.getInteger(android.media.MediaFormat.KEY_SAMPLE_RATE);
  audioTrack.configure(mimeType, channelCount, sampleRate, pcmEncoding);
}
项目:ScreenRecordCaptureMaster    文件:YXAudioEncoder.java   
/**
 * Creates and configures the AAC audio encoder and remembers the muxer that
 * will receive its output. The encoder is configured but not started here.
 *
 * @param muxerWrapper destination muxer for encoded samples
 * @throws IOException if the encoder cannot be created
 */
public void prepare(YXMuxerWrapper muxerWrapper) throws IOException {
    mMuxer = muxerWrapper;
    mBufferInfo = new MediaCodec.BufferInfo();

    mAudioEncoder = MediaCodec.createEncoderByType(MIME_TYPE);

    // Build the audio format: HE-AAC profile, stereo input mask, fixed
    // bitrate/channel count and an input-buffer cap of 8 KiB.
    mAudioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, CHANNEL);
    mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectHE);
    mAudioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_STEREO);
    mAudioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNEL);
    mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    mAudioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 8192);

    mAudioEncoder.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
}
项目:AppRTC-Android    文件:MediaCodecVideoEncoder.java   
/**
 * Wraps MediaCodec.createByCodecName, returning null on any failure.
 * The L SDK declares IOException on this call while earlier SDKs do not,
 * so a broad catch keeps one code path working on both.
 */
static MediaCodec createByCodecName(String codecName) {
  MediaCodec codec;
  try {
    codec = MediaCodec.createByCodecName(codecName);
  } catch (Exception ignored) {
    // Deliberate: caller treats null as "codec unavailable".
    codec = null;
  }
  return codec;
}