Android 2.3 makes it possible, so I built a theremin-like app in C alone.
(Added 2012) Please skip the content below and refer to this instead: http://d.hatena.ne.jp/miujun/20120307
(Added 2011-09-30) I just updated my HTC EVO WiMAX to 2.3 and tried this, with disappointing results. Sound comes out and the frequency changes, but the latency was terrible. It worked fine on the emulator, though. So please read on with that caveat in mind.
The source is messy, so please view it from a distance in a well-lit room.
This is a theremin whose dynamic sound generation uses OpenSL ES for Android, with a GUI built on NativeActivity. Honestly, OpenSL ES samples are still scarce and I worked this out through my own experiments, so it may contain mistakes. I don't own a Nexus S and have only tested on the emulator, but the dynamic sound generation does work. Please leave a comment if you have advice or corrections. It is based on the native-activity and native-audio samples from Android NDK r5; the GUI is essentially native-activity as-is.
#include <jni.h>
#include <errno.h>
#include <EGL/egl.h>
#include <GLES/gl.h>
#include <android/sensor.h>
#include <android/log.h>
#include <android_native_app_glue.h>
#define LOGI(...) ((void)__android_log_print(ANDROID_LOG_INFO, "native-activity", __VA_ARGS__))
#define LOGW(...) ((void)__android_log_print(ANDROID_LOG_WARN, "native-activity", __VA_ARGS__))
#include <stdlib.h>
#include <math.h>
#include <assert.h>
#include <string.h>
// for native audio
#include <SLES/OpenSLES.h>
#include "SLES/OpenSLES_Android.h"
// engine interfaces
static SLObjectItf engineObject = NULL;
static SLEngineItf engineEngine;
// output mix interfaces
static SLObjectItf outputMixObject = NULL;
// buffer queue player interfaces
static SLObjectItf bqPlayerObject = NULL;
static SLPlayItf bqPlayerPlay;
static SLAndroidSimpleBufferQueueItf bqPlayerBufferQueue;
#define OUTPUT_FRAMES (4096/2)  /* 2048 mono samples per buffer, about 46 ms at 44.1 kHz */
static short outputBuffer[OUTPUT_FRAMES];
static short *nextBuffer;
static unsigned nextSize;
static int nextCount;
static double freqNum = 0.0;  /* 0..1, set from the touch x position; scales the pitch */
static double ampNum = 0.0;   /* 0..1, set from the touch y position; scales the volume */
static double phase;
#define PI 3.141592
void tone_dynamic2(){
    unsigned i;
    /* Pulse wave via a phase accumulator: phase climbs from -1 toward 1 and
       then wraps; samples are 0 while phase is negative, amp while positive. */
    for (i = 0; i < OUTPUT_FRAMES; ++i) {
        phase += 22050*freqNum / 44100.0;
        phase = (phase >= 1.0) ? -1 : phase;
        outputBuffer[i] = (phase < 0) ? 0 : 32767*ampNum;  /* 32767, not 32768: the latter overflows a short */
    }
}
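If the raw pulse wave sounds too harsh, the same phase-accumulator approach drives a sine just as easily. A minimal sketch using the same globals; tone_dynamic_sine is my own hypothetical variant, not something from the NDK samples:
/* Hypothetical sine variant of tone_dynamic2: same accumulator, but phase
   wraps over [0,1) and each sample follows sin(2*PI*phase). */
void tone_dynamic_sine(){
    unsigned i;
    for (i = 0; i < OUTPUT_FRAMES; ++i) {
        phase += 22050*freqNum / 44100.0;  /* advance by frequency / sample rate */
        if (phase >= 1.0) phase -= 1.0;    /* wrap to [0,1) */
        outputBuffer[i] = (short)(32767 * ampNum * sin(2 * PI * phase));
    }
}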
// this callback handler is called every time a buffer finishes playing
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
assert(bq == bqPlayerBufferQueue);
assert(NULL == context);
    /* Rewrite outputBuffer in place with the next block of samples. */
    tone_dynamic2();
    SLresult result;
    /* nextBuffer/nextSize still point at outputBuffer (set in setTone),
       so re-enqueueing streams the freshly generated block. */
    result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
assert(SL_RESULT_SUCCESS == result);
}
void createEngine()
{
SLresult result;
// create engine
result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
assert(SL_RESULT_SUCCESS == result);
// realize the engine
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
// get the engine interface, which is needed in order to create other objects
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
assert(SL_RESULT_SUCCESS == result);
// create output mix, with environmental reverb specified as a non-required interface
const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
const SLboolean req[1] = {SL_BOOLEAN_FALSE};
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
assert(SL_RESULT_SUCCESS == result);
// realize the output mix
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
}
// create buffer queue audio player
void createBufferQueueAudioPlayer()
{
SLresult result;
// configure audio source
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_44_1,
SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN};
SLDataSource audioSrc = {&loc_bufq, &format_pcm};
// configure audio sink
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
// create audio player
const SLInterfaceID ids[2] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND};
const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk,
2, ids, req);
assert(SL_RESULT_SUCCESS == result);
// realize the player
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
assert(SL_RESULT_SUCCESS == result);
// get the play interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
assert(SL_RESULT_SUCCESS == result);
// get the buffer queue interface
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE,
&bqPlayerBufferQueue);
assert(SL_RESULT_SUCCESS == result);
// register callback on the buffer queue
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
assert(SL_RESULT_SUCCESS == result);
// set the player's state to playing
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
assert(SL_RESULT_SUCCESS == result);
}
void setStop()
{
    SLresult result;
    nextBuffer = NULL;
    nextSize = 0;
    /* Enqueue(NULL, 0) is invalid per the OpenSL ES spec; Clear() is the
       call that flushes any pending buffers. */
    result = (*bqPlayerBufferQueue)->Clear(bqPlayerBufferQueue);
    result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PAUSED);
    assert(SL_RESULT_SUCCESS == result);
}
void setTone()
{
    SLresult result;
    tone_dynamic2();
    nextBuffer = outputBuffer;
    nextSize = sizeof(outputBuffer);
    /* Prime the queue; from here on bqPlayerCallback keeps it fed. */
    result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
    assert(SL_RESULT_SUCCESS == result);
}
void setFreq(double count)
{
freqNum = count;
}
void setAmp(double count)
{
ampNum = count;
}
void shutdown()
{
// destroy buffer queue audio player object, and invalidate all associated interfaces
if (bqPlayerObject != NULL) {
(*bqPlayerObject)->Destroy(bqPlayerObject);
bqPlayerObject = NULL;
bqPlayerPlay = NULL;
bqPlayerBufferQueue = NULL;
}
// destroy output mix object, and invalidate all associated interfaces
if (outputMixObject != NULL) {
(*outputMixObject)->Destroy(outputMixObject);
outputMixObject = NULL;
}
// destroy engine object, and invalidate all associated interfaces
if (engineObject != NULL) {
(*engineObject)->Destroy(engineObject);
engineObject = NULL;
engineEngine = NULL;
}
}
/**
* Our saved state data.
*/
struct saved_state {
float angle;
int32_t x;
int32_t y;
};
/**
* Shared state for our app.
*/
struct engine {
struct android_app* app;
ASensorManager* sensorManager;
const ASensor* accelerometerSensor;
ASensorEventQueue* sensorEventQueue;
int animating;
EGLDisplay display;
EGLSurface surface;
EGLContext context;
int32_t width;
int32_t height;
struct saved_state state;
};
/**
* Initialize an EGL context for the current display.
*/
static int engine_init_display(struct engine* engine) {
// initialize OpenGL ES and EGL
/*
* Here specify the attributes of the desired configuration.
* Below, we select an EGLConfig with at least 8 bits per color
* component compatible with on-screen windows
*/
const EGLint attribs[] = {
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_BLUE_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_RED_SIZE, 8,
EGL_NONE
};
EGLint w, h, dummy, format;
EGLint numConfigs;
EGLConfig config;
EGLSurface surface;
EGLContext context;
EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
eglInitialize(display, 0, 0);
/* Here, the application chooses the configuration it desires. In this
* sample, we have a very simplified selection process, where we pick
* the first EGLConfig that matches our criteria */
eglChooseConfig(display, attribs, &config, 1, &numConfigs);
/* EGL_NATIVE_VISUAL_ID is an attribute of the EGLConfig that is
* guaranteed to be accepted by ANativeWindow_setBuffersGeometry().
* As soon as we picked a EGLConfig, we can safely reconfigure the
* ANativeWindow buffers to match, using EGL_NATIVE_VISUAL_ID. */
eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format);
ANativeWindow_setBuffersGeometry(engine->app->window, 0, 0, format);
surface = eglCreateWindowSurface(display, config, engine->app->window, NULL);
context = eglCreateContext(display, config, NULL, NULL);
if (eglMakeCurrent(display, surface, surface, context) == EGL_FALSE) {
LOGW("Unable to eglMakeCurrent");
return -1;
}
eglQuerySurface(display, surface, EGL_WIDTH, &w);
eglQuerySurface(display, surface, EGL_HEIGHT, &h);
engine->display = display;
engine->context = context;
engine->surface = surface;
engine->width = w;
engine->height = h;
engine->state.angle = 0;
// Initialize GL state.
glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST);
glEnable(GL_CULL_FACE);
glShadeModel(GL_SMOOTH);
glDisable(GL_DEPTH_TEST);
return 0;
}
/**
* Just draw the current frame in the display.
*/
static void engine_draw_frame(struct engine* engine) {
if (engine->display == NULL) {
// No display.
return;
}
// Just fill the screen with a color.
glClearColor(((float)engine->state.x)/engine->width, engine->state.angle,
((float)engine->state.y)/engine->height, 1);
glClear(GL_COLOR_BUFFER_BIT);
eglSwapBuffers(engine->display, engine->surface);
}
/**
* Tear down the EGL context currently associated with the display.
*/
static void engine_term_display(struct engine* engine) {
if (engine->display != EGL_NO_DISPLAY) {
eglMakeCurrent(engine->display, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
if (engine->context != EGL_NO_CONTEXT) {
eglDestroyContext(engine->display, engine->context);
}
if (engine->surface != EGL_NO_SURFACE) {
eglDestroySurface(engine->display, engine->surface);
}
eglTerminate(engine->display);
}
engine->animating = 0;
engine->display = EGL_NO_DISPLAY;
engine->context = EGL_NO_CONTEXT;
engine->surface = EGL_NO_SURFACE;
}
/**
* Process the next input event.
*/
static int32_t engine_handle_input(struct android_app* app, AInputEvent* event) {
struct engine* engine = (struct engine*)app->userData;
if (AInputEvent_getType(event) == AINPUT_EVENT_TYPE_MOTION) {
    engine->animating = 1;
    engine->state.x = AMotionEvent_getX(event, 0);
    engine->state.y = AMotionEvent_getY(event, 0);
    // Theremin control: horizontal touch position sets the pitch,
    // vertical position sets the volume.
    setFreq((double)engine->state.x/engine->width);
    setAmp((double)engine->state.y/engine->height);
return 1;
}
return 0;
}
/**
* Process the next main command.
*/
static void engine_handle_cmd(struct android_app* app, int32_t cmd) {
struct engine* engine = (struct engine*)app->userData;
switch (cmd) {
case APP_CMD_SAVE_STATE:
// The system has asked us to save our current state. Do so.
engine->app->savedState = malloc(sizeof(struct saved_state));
*((struct saved_state*)engine->app->savedState) = engine->state;
engine->app->savedStateSize = sizeof(struct saved_state);
break;
case APP_CMD_INIT_WINDOW:
// The window is being shown, get it ready.
if (engine->app->window != NULL) {
engine_init_display(engine);
engine_draw_frame(engine);
setFreq(0);
setAmp(0);
createEngine();
createBufferQueueAudioPlayer();
setTone();
}
break;
case APP_CMD_TERM_WINDOW:
// The window is being hidden or closed, clean it up.
engine_term_display(engine);
setStop();
shutdown();
break;
case APP_CMD_GAINED_FOCUS:
// When our app gains focus, we start monitoring the accelerometer.
if (engine->accelerometerSensor != NULL) {
ASensorEventQueue_enableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);
// We'd like to get 60 events per second (in us).
ASensorEventQueue_setEventRate(engine->sensorEventQueue,
engine->accelerometerSensor, (1000L/60)*1000);
}
break;
case APP_CMD_LOST_FOCUS:
// When our app loses focus, we stop monitoring the accelerometer.
// This is to avoid consuming battery while not being used.
if (engine->accelerometerSensor != NULL) {
ASensorEventQueue_disableSensor(engine->sensorEventQueue,
engine->accelerometerSensor);
}
// Also stop animating.
engine->animating = 0;
engine_draw_frame(engine);
break;
}
}
/**
* This is the main entry point of a native application that is using
* android_native_app_glue. It runs in its own thread, with its own
* event loop for receiving input events and doing other things.
*/
void android_main(struct android_app* state) {
struct engine engine;
// Make sure glue isn't stripped.
app_dummy();
memset(&engine, 0, sizeof(engine));
state->userData = &engine;
state->onAppCmd = engine_handle_cmd;
state->onInputEvent = engine_handle_input;
engine.app = state;
// Prepare to monitor accelerometer
engine.sensorManager = ASensorManager_getInstance();
engine.accelerometerSensor = ASensorManager_getDefaultSensor(engine.sensorManager,
ASENSOR_TYPE_ACCELEROMETER);
engine.sensorEventQueue = ASensorManager_createEventQueue(engine.sensorManager,
state->looper, LOOPER_ID_USER, NULL, NULL);
if (state->savedState != NULL) {
// We are starting with a previous saved state; restore from it.
engine.state = *(struct saved_state*)state->savedState;
}
// loop waiting for stuff to do.
while (1) {
// Read all pending events.
int ident;
int events;
struct android_poll_source* source;
// If not animating, we will block forever waiting for events.
// If animating, we loop until all events are read, then continue
// to draw the next frame of animation.
while ((ident=ALooper_pollAll(engine.animating ? 0 : -1, NULL, &events,
(void**)&source)) >= 0) {
// Process this event.
if (source != NULL) {
source->process(state, source);
}
// If a sensor has data, process it now.
if (ident == LOOPER_ID_USER) {
if (engine.accelerometerSensor != NULL) {
ASensorEvent event;
while (ASensorEventQueue_getEvents(engine.sensorEventQueue,
&event, 1) > 0) {
LOGI("accelerometer: x=%f y=%f z=%f",
event.acceleration.x, event.acceleration.y,
event.acceleration.z);
}
}
}
// Check if we are exiting.
if (state->destroyRequested != 0) {
engine_term_display(&engine);
return;
}
}
if (engine.animating) {
// Done with events; draw next animation frame.
engine.state.angle += .01f;
if (engine.state.angle > 1) {
engine.state.angle = 0;
}
// Drawing is throttled to the screen update rate, so there
// is no need to do timing here.
engine_draw_frame(&engine);
}
}
}
Android.mk
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE    := native-activity
LOCAL_SRC_FILES := main.c
LOCAL_LDLIBS    := -llog -landroid -lEGL -lGLESv1_CM
# for native audio
LOCAL_LDLIBS    += -lOpenSLES
# for logging
LOCAL_LDLIBS    += -llog
# for native asset manager
LOCAL_LDLIBS    += -landroid
LOCAL_STATIC_LIBRARIES := android_native_app_glue

include $(BUILD_SHARED_LIBRARY)

$(call import-module,android/native_app_glue)
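Assuming the standard NDK project layout (main.c and the two .mk files under jni/), building should just be a matter of running the NDK build script from the project root:
$ ndk-build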
Application.mk
APP_PLATFORM := android-9
(android-9 is Android 2.3 Gingerbread, the first platform level that ships NativeActivity and OpenSL ES, so nothing newer is needed.)
The flow of sound programming with OpenSL ES on Android 2.3
(Added 2012) Please skip the content below and refer to this instead: http://d.hatena.ne.jp/miujun/20120307
Create the engine.
result = slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);
Realize the engine. (I was never sure how best to translate "realize"; the SL_BOOLEAN_FALSE argument requests synchronous, blocking realization.)
result = (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);
This fetches (SLEngineItf)engineEngine, the engine interface that is used below to create the output mix and the player.
result = (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);
Create the output mix. (SLEngineItf)engineEngine is what creates output mixes and players.
result = (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 1, ids, req);
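The ids and req arrays here are the ones defined in createEngine() above: a single environmental-reverb interface, requested but not required:
const SLInterfaceID ids[1] = {SL_IID_ENVIRONMENTALREVERB};
const SLboolean req[1] = {SL_BOOLEAN_FALSE};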
Realize the output mix.
result = (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);
Create the audio player.
result = (*engineEngine)->CreateAudioPlayer(engineEngine, &bqPlayerObject, &audioSrc, &audioSnk, 2, ids, req);
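Here audioSrc and audioSnk are the buffer-queue source and output-mix sink built in createBufferQueueAudioPlayer() above (mono 16-bit PCM at 44.1 kHz, with a queue of up to two buffers), and this time the two requested interfaces are required:
SLDataLocator_AndroidSimpleBufferQueue loc_bufq = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
SLDataFormat_PCM format_pcm = {SL_DATAFORMAT_PCM, 1, SL_SAMPLINGRATE_44_1,
        SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
        SL_SPEAKER_FRONT_CENTER, SL_BYTEORDER_LITTLEENDIAN};
SLDataSource audioSrc = {&loc_bufq, &format_pcm};
SLDataLocator_OutputMix loc_outmix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
SLDataSink audioSnk = {&loc_outmix, NULL};
const SLInterfaceID ids[2] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND};
const SLboolean req[2] = {SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE};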
Realize the audio player.
result = (*bqPlayerObject)->Realize(bqPlayerObject, SL_BOOLEAN_FALSE);
Get the play interface: this fetches bqPlayerPlay, the interface used to manage the player's playback state.
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_PLAY, &bqPlayerPlay);
Get the buffer queue interface.
result = (*bqPlayerObject)->GetInterface(bqPlayerObject, SL_IID_BUFFERQUEUE, &bqPlayerBufferQueue);
Register the callback function on the buffer queue. bqPlayerCallback is then invoked over and over, each time a buffer finishes playing, so this is where the sound gets generated.
result = (*bqPlayerBufferQueue)->RegisterCallback(bqPlayerBufferQueue, bqPlayerCallback, NULL);
Set the player's state to playing. Nothing is audible at this point; sound starts once the first buffer is enqueued.
result = (*bqPlayerPlay)->SetPlayState(bqPlayerPlay, SL_PLAYSTATE_PLAYING);
Enqueue data on the buffer queue, passing the waveform via nextBuffer.
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
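In this program the first Enqueue is done by setTone(), which generates one buffer and primes the queue (see the full source above):
tone_dynamic2();
nextBuffer = outputBuffer;
nextSize = sizeof(outputBuffer);
result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);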
Inside bqPlayerCallback, just call the function that generates the waveform before re-enqueueing:
void bqPlayerCallback(SLAndroidSimpleBufferQueueItf bq, void *context)
{
    SLresult result;
    tone_dynamic2();  /* refill outputBuffer with the next block of samples */
    result = (*bqPlayerBufferQueue)->Enqueue(bqPlayerBufferQueue, nextBuffer, nextSize);
}
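Putting it all together, the initialization order this program uses (from the APP_CMD_INIT_WINDOW handler) is:
createEngine();                 /* engine object and SLEngineItf */
createBufferQueueAudioPlayer(); /* output mix, player, callback, set to PLAYING */
setTone();                      /* first Enqueue; from here bqPlayerCallback keeps the queue fed */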