First, define a buffer in the Java layer and allocate the corresponding memory for it as a direct ByteBuffer:

private ByteBuffer byteBuffer;
byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);
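The capacity that shows up in the native code below is 960 bytes. A plausible sizing, assuming 16-bit mono PCM with 10 ms buffers at 48 kHz (these parameters are an assumption, not stated in this post), would be:

// Hypothetical sizing that reproduces the 960-byte capacity noted later in
// the native code: 16-bit mono PCM (2 bytes per frame), 10 ms buffers, 48 kHz.
int bytesPerFrame = 1 /* channel */ * (16 /* bits per sample */ / 8);      // 2 bytes
int framesPerBuffer = 48000 /* Hz */ / 100 /* 10 ms chunks per second */;  // 480 frames
byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer);   // 960 bytes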

Then a native method is called to hand the buffer down to the native layer. This method was dynamically registered when JNI was initialized; it maps to C++ code that uses the JNI direct-buffer API to obtain the buffer's memory address and caches it in the native C++ object.

nativeCacheDirectBufferAddress(byteBuffer, nativeAudioRecord);

The registration itself happens on the native side, in the AudioRecordJni constructor (only the constructor body is shown here), which binds both native methods to the Java class via RegisterNatives:

{
  ALOGD("ctor%s", GetThreadInfo().c_str());

 // RTC_DCHECK(audio_parameters_.is_valid());
 // RTC_CHECK(j_environment_);
  JNINativeMethod native_methods[] = {
      {"nativeCacheDirectBufferAddress", "(Ljava/nio/ByteBuffer;J)V",
      reinterpret_cast<void*>(
          &webrtc::AudioRecordJni::CacheDirectBufferAddress)},
      {"nativeDataIsRecorded", "(IJ)V",
      reinterpret_cast<void*>(&webrtc::AudioRecordJni::DataIsRecorded)}};
  j_native_registration_ = j_environment_->RegisterNatives(
      "com/ifreetalk/ftalk/module/audio/engine/AudioRecord",
      native_methods, arraysize(native_methods));
  j_audio_record_.reset(new JavaAudioRecord(
      j_native_registration_.get(),
      j_native_registration_->NewObject(
          "<init>", "(Landroid/content/Context;J)V",
          JVM::GetInstance()->context(), PointerTojlong(this))));
  // Detach from this thread since we want to use the checker to verify calls
  // from the Java based audio thread.
  thread_checker_java_.DetachFromThread();
}
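
For the RegisterNatives call above to succeed, the Java class it names (com/ifreetalk/ftalk/module/audio/engine/AudioRecord) must declare native methods with matching signatures, and its (Context, long) constructor is what NewObject invokes. That class is not shown in this post; a minimal sketch, modeled on WebRTC's WebRtcAudioRecord.java, might look like this:

package com.ifreetalk.ftalk.module.audio.engine;

import android.content.Context;
import java.nio.ByteBuffer;

// Sketch only (assumed): the real class is not shown in this post.
public class AudioRecord {
  private final long nativeAudioRecord;  // address of the native AudioRecordJni object
  private ByteBuffer byteBuffer;

  // Matches the "(Landroid/content/Context;J)V" constructor that the native
  // side instantiates via NewObject() in the snippet above.
  AudioRecord(Context context, long nativeAudioRecord) {
    this.nativeAudioRecord = nativeAudioRecord;
  }

  // Bound to AudioRecordJni::CacheDirectBufferAddress, "(Ljava/nio/ByteBuffer;J)V".
  private native void nativeCacheDirectBufferAddress(ByteBuffer byteBuffer, long nativeAudioRecord);

  // Bound to AudioRecordJni::DataIsRecorded, "(IJ)V".
  private native void nativeDataIsRecorded(int bytes, long nativeAudioRecord);
}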
// Static JNI entry point. The jlong argument carries the AudioRecordJni
// pointer that was handed to Java in the constructor; cast it back and
// forward the call to the instance method.
void JNICALL AudioRecordJni::CacheDirectBufferAddress(
    JNIEnv* env, jobject obj, jobject byte_buffer, jlong nativeAudioRecord) {
  webrtc::AudioRecordJni* this_object =
      reinterpret_cast<webrtc::AudioRecordJni*>(nativeAudioRecord);
  this_object->OnCacheDirectBufferAddress(env, byte_buffer);
}

void AudioRecordJni::OnCacheDirectBufferAddress(
    JNIEnv* env, jobject byte_buffer) {
  //ALOGD("OnCacheDirectBufferAddress");
  //RTC_DCHECK(thread_checker_.CalledOnValidThread());
  //RTC_DCHECK(!direct_buffer_address_);
  // Map and cache the direct buffer's memory address.
  direct_buffer_address_ = env->GetDirectBufferAddress(byte_buffer);
  // Query the buffer's size in bytes.
  jlong capacity = env->GetDirectBufferCapacity(byte_buffer);
//  ALOGD("direct buffer capacity: %lld", capacity);
  direct_buffer_capacity_in_bytes_ = static_cast<size_t>(capacity);  // 960 in this setup
}

// Static JNI entry point for nativeDataIsRecorded: cast the jlong back to the
// owning AudioRecordJni instance and forward the recorded data length.
void JNICALL AudioRecordJni::DataIsRecorded(
    JNIEnv* env, jobject obj, jint length, jlong nativeAudioRecord) {
  webrtc::AudioRecordJni* this_object =
      reinterpret_cast<webrtc::AudioRecordJni*>(nativeAudioRecord);
  this_object->OnDataIsRecorded(length);
}
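
On the Java side, nativeDataIsRecorded is what the recording thread calls after each read into the shared direct buffer; since the native side already holds the buffer's address, only the byte count has to cross the JNI boundary. That loop is not shown in this post, but modeled on WebRTC's WebRtcAudioRecord recording thread it looks roughly like this (the field names are assumptions):

// Sketch only (assumed): Java-side recording loop that drives nativeDataIsRecorded.
// "audioRecord" is an android.media.AudioRecord configured elsewhere; "byteBuffer",
// "nativeAudioRecord" and "keepAlive" are fields of the surrounding class.
private void recordLoop() {
  while (keepAlive) {
    // Record straight into the shared direct buffer; no per-frame copy is
    // needed because native code reads from the cached direct_buffer_address_.
    int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity());
    if (bytesRead == byteBuffer.capacity()) {
      // Notify native code that one full buffer of PCM data is ready.
      nativeDataIsRecorded(bytesRead, nativeAudioRecord);
    }
  }
}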
