
In my Android project, I use OpenSL ES to play audio files. I would like to process the audio on the fly by extracting the audio samples, processing them, and redirecting them to the audio output.

Here is what I tried so far:
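First, the headers and globals the code below relies on (their declarations aren't shown in my snippets, so the exact types here are an assumption on my part, following the NDK native-audio sample):

#include <jni.h>
#include <assert.h>
#include <stdint.h>
#include <stdlib.h>
#include <SLES/OpenSLES.h>
#include <SLES/OpenSLES_Android.h>
#include <android/asset_manager.h>
#include <android/asset_manager_jni.h>

// engine and output mix
static SLObjectItf engineObject = NULL;
static SLEngineItf engineEngine = NULL;
static SLObjectItf outputMixObject = NULL;

// file-descriptor player and its interfaces
static SLObjectItf fdPlayerObject = NULL;
static SLPlayItf fdPlayerPlay = NULL;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue = NULL;
static SLVolumeItf fdPlayerVolume = NULL;

// processing buffer and playback state
static int16_t *audioBuffer = NULL;
static int buffersize = 0;
static int samplerate = 0;
static int nextCount = 0;
static unsigned nextSize = 0;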

// create the engine and output mix objects
void Java_com_ywl5320_openslaudio_MainActivity_createEngine(JNIEnv* env, jclass clazz, int newsamplerate, int newbuffersize)
{
    SLresult result;
    buffersize = newbuffersize;
    samplerate = newsamplerate;
    audioBuffer = calloc(buffersize, sizeof(int16_t));

    // create engine
    result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // realize the engine
    result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the engine interface, which is needed in order to create other objects
    result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // create the output mix, with no interfaces requested
    result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, NULL, NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // realize the output mix
    result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;
}

// this callback handler is called every time a buffer finishes playing
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    if (--nextCount > 0 && NULL != audioBuffer && 0 != nextSize)
    {
        // audio processing...
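        // A minimal sketch of the kind of processing I have in mind here, assuming
        // the enqueued data is interleaved 16-bit PCM (untested, placeholder gain of 0.5):
        int16_t *samples = (int16_t *) audioBuffer;
        size_t numSamples = nextSize / sizeof(int16_t);
        for (size_t i = 0; i < numSamples; ++i)
        {
            int32_t v = (int32_t) (samples[i] * 0.5f);
            if (v > INT16_MAX) v = INT16_MAX;
            if (v < INT16_MIN) v = INT16_MIN;
            samples[i] = (int16_t) v;
        }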

        // enqueue another buffer
        SLresult result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, audioBuffer, nextSize);
        assert(SL_RESULT_SUCCESS == result);
        (void)result;
    }
}

// create asset audio player
jboolean Java_com_ywl5320_openslaudio_MainActivity_createAssetAudioPlayer(JNIEnv* env, jclass clazz, jobject assetManager, jstring filename)
{
    SLresult result;

    // convert Java string to UTF-8
    const char *utf8 = (*env)->GetStringUTFChars(env, filename, NULL);
    assert(NULL != utf8);

    // use asset manager to open asset by filename
    AAssetManager* mgr = AAssetManager_fromJava(env, assetManager);
    assert(NULL != mgr);
    AAsset* asset = AAssetManager_open(mgr, utf8, AASSET_MODE_UNKNOWN);

    // release the Java string and UTF-8
    (*env)->ReleaseStringUTFChars(env, filename, utf8);

    // the asset might not be found
    if (NULL == asset) {
        return JNI_FALSE;
    }

    // open asset as file descriptor
    off_t start, length;
    int fd = AAsset_openFileDescriptor(asset, &start, &length);
    assert(0 <= fd);
    AAsset_close(asset);

    // configure audio source
    SLDataLocator_AndroidFD loc_fd = {SL_DATALOCATOR_ANDROIDFD, fd, start, length};
    SLDataFormat_MIME format_mime = {SL_DATAFORMAT_MIME, NULL, SL_CONTAINERTYPE_UNSPECIFIED};
    SLDataSource audioSrc = {&loc_fd, &format_mime};

    // configure audio sink
    SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink audioSnk = {&loc_outmix, NULL};

    // create audio player
    const SLInterfaceID ids[3] = {SL_IID_PLAY, SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
    result = (*engineEngine)->CreateAudioPlayer(engineEngine, &fdPlayerObject, &audioSrc, &audioSnk, 3, ids, req);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // realize the player
    result = (*fdPlayerObject)->Realize(fdPlayerObject, SL_BOOLEAN_FALSE);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the play interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_PLAY, &fdPlayerPlay);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the buffer queue interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_BUFFERQUEUE, &bqPlayerBufferQueue);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // register callback on the buffer queue
    result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    // get the volume interface
    result = (*fdPlayerObject)->GetInterface(fdPlayerObject, SL_IID_VOLUME, &fdPlayerVolume);
    assert(SL_RESULT_SUCCESS == result);
    (void)result;

    return JNI_TRUE;
}

Unfortunately, it crashes when it tries to create the audio player, with the following error:

10-04 10:46:10.809 20531-20531/com.ywl5320.openslaudio E/libOpenSLES: can't require SL_IID_BUFFERQUEUE or SL_IID_ANDROIDSIMPLEBUFFERQUEUE with a non-buffer queue data sink
10-04 10:46:10.809 20531-20531/com.ywl5320.openslaudio W/libOpenSLES: Leaving Engine::CreateAudioPlayer (SL_RESULT_FEATURE_UNSUPPORTED)
10-04 10:46:10.809 20531-20531/com.ywl5320.openslaudio A/libc: E:\OpenSLAudio\app\src\main\cpp\opensl_audio.c:260: jboolean Java_com_ywl5320_openslaudio_MainActivity_createAssetAudioPlayer(JNIEnv *, jclass, jobject, jstring): assertion "SL_RESULT_SUCCESS == result" failed
10-04 10:46:10.810 20531-20531/com.ywl5320.openslaudio A/libc: Fatal signal 6 (SIGABRT), code -6 in tid 20531 (320.openslaudio)

    [ 10-04 10:46:10.811   441:  441 W/         ]
    debuggerd: handling request: pid=20531 uid=10230 gid=10230 tid=20531

It looks like the (*engineEngine)->CreateAudioPlayer() function doesn't work.
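If I read the error correctly, SL_IID_BUFFERQUEUE (or SL_IID_ANDROIDSIMPLEBUFFERQUEUE) can only be required when the player's data locator is itself a buffer queue, which my FD/MIME source is not. For reference, this is the kind of PCM buffer-queue source I believe would be needed instead (a sketch adapted from the NDK native-audio sample; bqPlayerObject is a new object, and the 44.1 kHz stereo 16-bit format is an assumption, not something I have working with an asset file):

// PCM buffer-queue source (the app enqueues raw PCM buffers itself)
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 2, SL_SAMPLINGRATE_44_1,
                               SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
                               SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT,
                               SL_BYTEORDER_LITTLEENDIAN};
SLDataSource audioSrc = {&loc_bufq, &format_pcm};

// same output-mix sink as before
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};

// with a buffer-queue source, SL_IID_BUFFERQUEUE can legitimately be required
SLObjectItf bqPlayerObject = NULL;
const SLInterfaceID ids[2] = {SL_IID_BUFFERQUEUE, SL_IID_VOLUME};
const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
SLresult result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject,
                                                     &audioSrc, &audioSnk, 2, ids, req);

But as far as I understand, a player created this way only plays the raw PCM I enqueue myself, so the asset would still have to be decoded by something else first, and that is exactly the part I can't figure out.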

I also tried to follow this tutorial (part 2 here), but I haven't managed to make it work so far, since I'm a beginner with OpenSL ES.

So if anyone knows how to register the callback function so that I can process the audio samples of the audio file on the fly, I'm interested.

Thanks for your help.

matteoh
    Looks like the tutorial used https://github.com/googlesamples/android-ndk/blob/master/native-audio/app/src/main/java/com/example/nativeaudio/NativeAudio.java – Gillsoft AB Oct 03 '18 at 19:39
  • @GillsoftAB I already checked that one and part of that code doesn't work for Android API > 19. Also, it doesn't show what I'm asking (or maybe I get it wrong), because I don't see any connection between the createAssetAudioPlayer() function and the createBufferQueueAudioPlayer() function... – matteoh Oct 04 '18 at 09:15
  • I think your exception is related to this topic: https://stackoverflow.com/questions/10770979/is-it-possible-to-get-a-byte-buffer-directly-from-an-audio-asset-in-opensl-es-f – Gillsoft AB Oct 04 '18 at 09:23
  • @GillsoftAB Thanks but unfortunately, it still doesn't work. I have the following error: "pAudioSnk: data format 0 not allowed", and I don't know what it means... – matteoh Oct 04 '18 at 15:36
  • 0 = no source, and is not allowed. You have to create a buffer from something. You might have to debug / execute on a real device and not in a simulator. – Gillsoft AB Oct 04 '18 at 15:41
  • @GillsoftAB That's what I want to do, create a buffer from an audio file, but I don't know how to do it. And also, I do all my tests on a real device. – matteoh Oct 04 '18 at 17:15

0 Answers