Reputation: 179
Now I am developing a function that is equipped with voice recognition and voice recording. I want to collect the voice buffer data while the recognition process is happening. According to the RecognitionListener documentation, onBufferReceived should be triggered during the recognition process, but obviously no log has been printed during recognition. In debug mode, the debugger also shows nothing — it never steps into the onBufferReceived method. My intention is to collect the buffer data during the recognition process and save that buffer into the recording file.
MainActivity
package com.example.syoui.voicerecordtest;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Handler;
import android.speech.RecognitionListener;
import android.speech.RecognizerIntent;
import android.speech.SpeechRecognizer;
import android.support.annotation.NonNull;
import android.support.v4.app.ActivityCompat;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import java.io.IOException;
import java.util.ArrayList;
import static android.R.attr.prompt;
public class MainActivity extends AppCompatActivity {

    private static final String LOG_TAG = "AudioRecordTest";
    private static final String TAG = "SpeechRecognizerSampleActivity";
    private static final int REQUEST_RECORD_AUDIO_PERMISSION = 200;

    /** Absolute path of the 3gp file written by the MediaRecorder (set in onCreate). */
    private static String mFileName = null;

    private RecordButton mRecordButton = null;
    public MediaRecorder mRecorder = null;
    private PlayButton mPlayButton = null;
    private MediaPlayer mPlayer = null;

    /** Result of the RECORD_AUDIO runtime-permission request. */
    private boolean permissionToRecordAccepted = false;
    private String[] permissions = {Manifest.permission.RECORD_AUDIO};

    private SpeechRecognizer recog;
    /**
     * Restarts speech recognition; posted with a delay after end-of-speech and
     * after recoverable recognition errors.
     * FIX: the original code never assigned this field, so every
     * handler.postDelayed(readyRecognizeSpeech, ...) call posted null and
     * recognition was never restarted. It is now initialized in onCreate().
     */
    private Runnable readyRecognizeSpeech;
    public Handler handler = new Handler();

    @Override
    public void onRequestPermissionsResult(int requestCode,
                                           @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        switch (requestCode) {
            case REQUEST_RECORD_AUDIO_PERMISSION:
                // FIX: guard against an empty grantResults array (request interrupted).
                permissionToRecordAccepted = grantResults.length > 0
                        && grantResults[0] == PackageManager.PERMISSION_GRANTED;
                break;
        }
        if (!permissionToRecordAccepted) finish();
    }

    /** Starts or stops audio capture depending on the record-button toggle state. */
    private void onRecord(boolean start) {
        if (start) {
            startRecording();
        } else {
            stopRecording();
        }
    }

    /** Starts or stops playback of the recorded file depending on the play-button toggle state. */
    private void onPlay(boolean start) {
        if (start) {
            startPlaying();
        } else {
            stopPlaying();
        }
    }

    /** Plays back the file last written by the recorder. */
    private void startPlaying() {
        mPlayer = new MediaPlayer();
        try {
            mPlayer.setDataSource(mFileName);
            mPlayer.prepare();
            mPlayer.start();
        } catch (IOException e) {
            // Keep the exception in the log instead of discarding the cause.
            Log.e(LOG_TAG, "prepare() failed", e);
        }
    }

    private void stopPlaying() {
        // FIX: avoid a NullPointerException if playback was never started.
        if (mPlayer != null) {
            mPlayer.release();
            mPlayer = null;
        }
    }

    /**
     * Starts recording the microphone to {@link #mFileName} and then kicks off
     * speech recognition.
     *
     * NOTE(review): MediaRecorder and SpeechRecognizer both need exclusive
     * access to the microphone; running them simultaneously is expected to
     * fail on most devices (typically surfacing as ERROR_AUDIO or
     * ERROR_CLIENT in the recognition listener).
     */
    private void startRecording() {
        if (mRecorder != null) {
            stopRecording();
        }
        mRecorder = new MediaRecorder();
        mRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mRecorder.setOutputFile(mFileName);
        mRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        try {
            mRecorder.prepare();
        } catch (IOException e) {
            Log.e(LOG_TAG, "prepare() failed", e);
            // FIX: the original fell through and called start() on an
            // unprepared recorder, which throws IllegalStateException.
            mRecorder.release();
            mRecorder = null;
            return;
        }
        mRecorder.start();
        startRecognizeSpeech();
    }

    private void stopRecording() {
        // FIX: avoid a NullPointerException if recording was never started.
        if (mRecorder != null) {
            mRecorder.stop();
            mRecorder.release();
            mRecorder = null;
        }
    }

    /** Toggle button that starts/stops the MediaRecorder capture. */
    class RecordButton extends Button {
        boolean mStartRecording = true;

        OnClickListener clicker = new OnClickListener() {
            public void onClick(View v) {
                onRecord(mStartRecording);
                if (mStartRecording) {
                    setText("Stop recording");
                } else {
                    setText("Start recording");
                }
                mStartRecording = !mStartRecording;
            }
        };

        public RecordButton(Context ctx) {
            super(ctx);
            setText("Start recording");
            setOnClickListener(clicker);
        }
    }

    /** Toggle button that starts/stops playback of the recorded file. */
    class PlayButton extends Button {
        boolean mStartPlaying = true;

        OnClickListener clicker = new OnClickListener() {
            public void onClick(View v) {
                onPlay(mStartPlaying);
                if (mStartPlaying) {
                    setText("Stop playing");
                } else {
                    setText("Start playing");
                }
                mStartPlaying = !mStartPlaying;
            }
        };

        public PlayButton(Context ctx) {
            super(ctx);
            setText("Start playing");
            setOnClickListener(clicker);
        }
    }

    @Override
    public void onStop() {
        super.onStop();
        // Release media resources when the activity leaves the foreground.
        if (mRecorder != null) {
            mRecorder.release();
            mRecorder = null;
        }
        if (mPlayer != null) {
            mPlayer.release();
            mPlayer = null;
        }
    }

    @Override
    public void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        setContentView(R.layout.activity_main);

        // Record to the external cache directory for visibility.
        mFileName = getExternalCacheDir().getAbsolutePath() + "/audiorecordtest.3gp";
        Log.i("mFileName", mFileName);

        ActivityCompat.requestPermissions(this, permissions, REQUEST_RECORD_AUDIO_PERMISSION);

        LinearLayout ll = (LinearLayout) findViewById(R.id.recordButton);
        mRecordButton = new RecordButton(this);
        ll.addView(mRecordButton,
                new LinearLayout.LayoutParams(
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        0));
        mPlayButton = new PlayButton(this);
        ll.addView(mPlayButton,
                new LinearLayout.LayoutParams(
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        ViewGroup.LayoutParams.WRAP_CONTENT,
                        0));

        // FIX: give the restart runnable a body; the field used to stay null.
        readyRecognizeSpeech = new Runnable() {
            @Override
            public void run() {
                startRecognizeSpeech();
            }
        };

        // Register the recognition listener.
        recog = SpeechRecognizer.createSpeechRecognizer(this);
        recog.setRecognitionListener(new RecogListener(this));

        Button b = (Button) findViewById(R.id.start_recognize);
        b.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                startRecognizeSpeech();
            }
        });
    }

    /** Builds the recognition intent, starts listening, and resets the status labels. */
    private void startRecognizeSpeech() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_WEB_SEARCH);
        // FIX: the original passed android.R.attr.prompt (an int attribute id,
        // pulled in via "import static android.R.attr.prompt"). EXTRA_PROMPT
        // expects a user-visible String.
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speak now");
        recog.startListening(intent);
        ((TextView) findViewById(R.id.status)).setText("");
        ((TextView) findViewById(R.id.sub_status)).setText("");
        findViewById(R.id.start_recognize).setEnabled(false);
    }

    /** Receives recognition callbacks and mirrors them into the status TextViews. */
    private static class RecogListener implements RecognitionListener {
        private MainActivity caller;
        private TextView status;
        private TextView subStatus;

        RecogListener(MainActivity a) {
            caller = a;
            status = (TextView) a.findViewById(R.id.status);
            subStatus = (TextView) a.findViewById(R.id.sub_status);
        }

        // Recognizer is ready for speech input.
        @Override
        public void onReadyForSpeech(Bundle params) {
            status.setText("ready for speech");
            Log.v(TAG, "ready for speech");
        }

        // Speech input has started.
        @Override
        public void onBeginningOfSpeech() {
            status.setText("beginning of speech");
            Log.v(TAG, "beginning of speech");
        }

        /**
         * Would deliver raw audio buffers captured during recognition.
         *
         * NOTE: Google's recognition service stopped invoking this callback as
         * of Ice Cream Sandwich — which is exactly why no log line ever
         * appears here and the debugger never steps in. To capture audio
         * while recognizing, record through a separate AudioRecord path
         * instead of relying on this callback.
         * (The original code also contained a dead copy of this method named
         * BufferReceived, without @Override; it was never called and has been
         * removed.)
         */
        @Override
        public void onBufferReceived(byte[] buffer) {
            status.setText("onBufferReceived");
            Log.v(TAG, "onBufferReceived");
        }

        // Input sound level (dB) changed.
        @Override
        public void onRmsChanged(float rmsdB) {
            String s = String.format("recieve : % 2.2f[dB]", rmsdB);
            subStatus.setText(s);
        }

        // Speech input has ended; schedule a restart of recognition.
        @Override
        public void onEndOfSpeech() {
            status.setText("end of speech");
            Log.v(TAG, "end of speech");
            caller.handler.postDelayed(caller.readyRecognizeSpeech, 500);
        }

        // Network or recognition error.
        @Override
        public void onError(int error) {
            status.setText("on error");
            caller.findViewById(R.id.start_recognize).setEnabled(true);
            Log.v(TAG, "on error");
            switch (error) {
                case SpeechRecognizer.ERROR_AUDIO:
                    // Audio recording error.
                    subStatus.setText("ERROR_AUDIO");
                    break;
                case SpeechRecognizer.ERROR_CLIENT:
                    // Other client-side error on the device.
                    subStatus.setText("ERROR_CLIENT");
                    break;
                case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
                    // Missing permissions.
                    subStatus.setText("ERROR_INSUFFICIENT_PERMISSIONS");
                    break;
                case SpeechRecognizer.ERROR_NETWORK:
                    // Other network error.
                    subStatus.setText("ERROR_NETWORK");
                    break;
                case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
                    // Network operation timed out.
                    subStatus.setText("ERROR_NETWORK_TIMEOUT");
                    break;
                case SpeechRecognizer.ERROR_NO_MATCH:
                    // No recognition result matched; retry.
                    subStatus.setText("ERROR_NO_MATCH");
                    caller.handler.postDelayed(caller.readyRecognizeSpeech, 1000);
                    break;
                case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
                    // RecognitionService is busy; retry.
                    subStatus.setText("ERROR_RECOGNIZER_BUSY");
                    caller.handler.postDelayed(caller.readyRecognizeSpeech, 1000);
                    break;
                case SpeechRecognizer.ERROR_SERVER:
                    // Server-side error.
                    subStatus.setText("ERROR_SERVER");
                    break;
                case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
                    // No speech input; retry.
                    subStatus.setText("ERROR_SPEECH_TIMEOUT");
                    caller.handler.postDelayed(caller.readyRecognizeSpeech, 1000);
                    break;
                default:
                    break;
            }
        }

        // Reserved for future events.
        @Override
        public void onEvent(int eventType, Bundle params) {
            status.setText("on event");
            Log.v(TAG, "on event");
        }

        // Called when partial recognition results are available.
        @Override
        public void onPartialResults(Bundle partialResults) {
            status.setText("on partial results");
            // FIX: the original logged "on results" here, making the two
            // callbacks indistinguishable in logcat.
            Log.v(TAG, "on partial results");
        }

        // Final recognition results.
        @Override
        public void onResults(Bundle data) {
            status.setText("on results");
            Log.v(TAG, "on results");
            ArrayList<String> results =
                    data.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
            TextView t = (TextView) caller.findViewById(R.id.result);
            t.setText("");
            // FIX: the bundle is not guaranteed to contain the result list.
            if (results != null) {
                for (String s : results) {
                    t.append(s + "\n");
                }
            }
            caller.findViewById(R.id.start_recognize).setEnabled(true);
        }
    }
}
activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen for MainActivity: a start-recognition button, two status
     labels updated by RecogListener, a scrolling result view, and an empty
     LinearLayout (@+id/recordButton) that MainActivity.onCreate() fills with
     the record/play buttons at runtime. -->
<android.support.constraint.ConstraintLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.example.syoui.voicerecordtest.MainActivity">
<LinearLayout
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:orientation="vertical" >
<!-- Starts speech recognition (wired up in MainActivity.onCreate). -->
<Button
android:id="@+id/start_recognize"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/start_recognize" />
<!-- Primary recognition status ("ready for speech", "on results", ...). -->
<TextView
android:id="@+id/status"
android:layout_width="fill_parent"
android:layout_height="wrap_content" />
<!-- Secondary status: dB level and error names. -->
<TextView
android:id="@+id/sub_status"
android:layout_width="fill_parent"
android:layout_height="wrap_content" />
<!-- Recognition results, one candidate per line. -->
<ScrollView
android:layout_height="fill_parent"
android:layout_width="wrap_content">
<TextView
android:id="@+id/result"
android:inputType="textMultiLine"
android:layout_height="fill_parent"
android:layout_width="fill_parent"/>
</ScrollView>
<!-- Container populated at runtime with RecordButton and PlayButton. -->
<LinearLayout
android:layout_width="fill_parent"
android:layout_height="130dp"
android:orientation="vertical"
android:id="@+id/recordButton">
</LinearLayout>
</LinearLayout>
</android.support.constraint.ConstraintLayout>
string.xml
<resources>
<string name="app_name">voiceRecordTest</string>
<!-- Label of the start-recognition button (Japanese for "Start"). -->
<string name="start_recognize">開始</string>
</resources>
Upvotes: 3
Views: 2534
Reputation: 11
As of ICS (Ice Cream Sandwich), onBufferReceived is no longer called by the platform recognizer. Read this discussion on how to build BufferReceived() to capture voice using RecognizerIntent instead.
Upvotes: 1