So implementing Google Voice Recognition on Android becomes extremely easy.
Speech recognition uses cloud technology to recognize the user's voice input, which enables features such as voice control. Below we will use the API provided by Google to implement this functionality.
The feature: recognize what the user says and print the recognized text in a list.
The UI looks like this:
The user taps the Speak button, and the recognition screen is shown:
After the user finishes speaking, the audio is submitted to the cloud for recognition:
When the cloud search completes, the returned data is printed:
The complete code is as follows:
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.example.android.apis.app;

import com.example.android.apis.R;

import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ListView;

import java.util.ArrayList;
import java.util.List;
/**
 * Sample code that invokes the speech recognition intent API.
 */
public class VoiceRecognition extends Activity implements OnClickListener {

    private static final int VOICE_RECOGNITION_REQUEST_CODE = 1234;

    private ListView mList;
    /**
     * Called when the activity is first created.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Inflate our UI from its XML layout description.
        setContentView(R.layout.voice_recognition);

        // Get the display items for later interaction.
        Button speakButton = (Button) findViewById(R.id.btn_speak);
        mList = (ListView) findViewById(R.id.list);

        // Check to see if a recognition activity is present.
        PackageManager pm = getPackageManager();
        List<ResolveInfo> activities = pm.queryIntentActivities(
                new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH), 0);
        if (activities.size() != 0) {
            speakButton.setOnClickListener(this);
        } else {
            speakButton.setEnabled(false);
            speakButton.setText("Recognizer not present");
        }
    }
    /**
     * Handle the click on the start recognition button.
     */
    public void onClick(View v) {
        if (v.getId() == R.id.btn_speak) {
            startVoiceRecognitionActivity();
        }
    }
    /**
     * Fire an intent to start the speech recognition activity.
     */
    private void startVoiceRecognitionActivity() {
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Speech recognition demo");
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
    }
    /**
     * Handle the results from the recognition activity.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == VOICE_RECOGNITION_REQUEST_CODE && resultCode == RESULT_OK) {
            // Fill the list view with the strings the recognizer thought it could have heard.
            ArrayList<String> matches = data.getStringArrayListExtra(
                    RecognizerIntent.EXTRA_RESULTS);
            mList.setAdapter(new ArrayAdapter<String>(this, android.R.layout.simple_list_item_1,
                    matches));
        }

        super.onActivityResult(requestCode, resultCode, data);
    }
}
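The activity inflates R.layout.voice_recognition and looks up R.id.btn_speak and R.id.list, but the layout resource itself is not shown above. A minimal res/layout/voice_recognition.xml that matches those IDs might look like the sketch below; the widget attributes are assumptions, not the original resource file.
<?xml version="1.0" encoding="utf-8"?>
<!-- Minimal sketch of a layout matching the IDs used in VoiceRecognition;
     the original sample's resource file is not reproduced here. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent">

    <Button
        android:id="@+id/btn_speak"
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:text="Speak" />

    <ListView
        android:id="@+id/list"
        android:layout_width="fill_parent"
        android:layout_height="fill_parent" />

</LinearLayout>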