waitInMainSignalCatcherLoop,Thread*=0x72c22ee000,peer=0x12d00280,"Signal Catcher"]: reacting to signal 3
Good day, everyone. I would like to ask: what is causing this ANR? (The log line above is the runtime's Signal Catcher reacting to SIGQUIT, signal 3, which the system sends to dump thread stacks when it detects an ANR.) In my project I have a service bound to an activity. When I leave that activity, the app freezes for a moment. I suspect the service is still running, even though I unbind it in the activity's onStop().
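To make the lifecycle clearer, here is a stripped-down sketch of the pattern I am describing, assuming BIND_AUTO_CREATE (the class name is hypothetical and the connection callbacks are empty; the full classes are posted below):

import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;
import android.support.v7.app.AppCompatActivity;

// Simplified sketch of the binding lifecycle in question; not the real code.
public class LifecycleSketchActivity extends AppCompatActivity {

    private final ServiceConnection mConnection = new ServiceConnection() {
        @Override
        public void onServiceConnected(ComponentName name, IBinder binder) {
            // Bound; the service is now usable.
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            // Connection lost unexpectedly.
        }
    };

    @Override
    protected void onStart() {
        super.onStart();
        bindService(new Intent(this, SpeechService.class), mConnection,
                Context.BIND_AUTO_CREATE);
    }

    @Override
    protected void onStop() {
        // After the last client unbinds, the system destroys the service and
        // calls SpeechService.onDestroy() on the main thread.
        unbindService(mConnection);
        super.onStop();
    }
}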
Here is my service class:
public class SpeechService extends Service {
public interface Listener {
/**
* Called when a new piece of text was recognized by the Speech API.
*
* @param text The text.
* @param isFinal {@code true} when the API finished processing audio.
*/
void onSpeechRecognized(String text, boolean isFinal);
}
private static final String TAG = "SpeechService";
private static final String PREFS = "SpeechService";
private static final String PREF_ACCESS_TOKEN_VALUE = "access_token_value";
private static final String PREF_ACCESS_TOKEN_EXPIRATION_TIME = "access_token_expiration_time";
/** We reuse an access token if its expiration time is longer than this. */
private static final int ACCESS_TOKEN_EXPIRATION_TOLERANCE = 30 * 60 * 1000; // thirty minutes
/** We refresh the current access token before it expires. */
private static final int ACCESS_TOKEN_FETCH_MARGIN = 60 * 1000; // one minute
public static final List<String> SCOPE =
Collections.singletonList("https://www.googleapis.com/auth/cloud-platform");
private static final String HOSTNAME = "speech.googleapis.com";
private static final int PORT = 443;
private final SpeechBinder mBinder = new SpeechBinder();
private final ArrayList<Listener> mListeners = new ArrayList<>();
private volatile AccessTokenTask mAccessTokenTask;
private SpeechGrpc.SpeechStub mApi;
private static Handler mHandler;
private final StreamObserver<StreamingRecognizeResponse> mResponseObserver
= new StreamObserver<StreamingRecognizeResponse>() {
@Override
public void onNext(StreamingRecognizeResponse response) {
String text = null;
boolean isFinal = false;
if (response.getResultsCount() > 0) {
final StreamingRecognitionResult result = response.getResults(0);
isFinal = result.getIsFinal();
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null) {
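// Note: gRPC delivers this callback on one of its transport/executor threads,
// so the listeners below are invoked off the main thread.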
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, isFinal);
}
}
}
@Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
}
@Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private final StreamObserver<RecognizeResponse> mFileResponseObserver
= new StreamObserver<RecognizeResponse>() {
@Override
public void onNext(RecognizeResponse response) {
String text = null;
if (response.getResultsCount() > 0) {
final SpeechRecognitionResult result = response.getResults(0);
if (result.getAlternativesCount() > 0) {
final SpeechRecognitionAlternative alternative = result.getAlternatives(0);
text = alternative.getTranscript();
}
}
if (text != null) {
for (Listener listener : mListeners) {
listener.onSpeechRecognized(text, true);
}
}
}
@Override
public void onError(Throwable t) {
Log.e(TAG, "Error calling the API.", t);
}
@Override
public void onCompleted() {
Log.i(TAG, "API completed.");
}
};
private StreamObserver<StreamingRecognizeRequest> mRequestObserver;
public static SpeechService from(IBinder binder) {
return ((SpeechBinder) binder).getService();
}
@Override
public void onCreate() {
super.onCreate();
mHandler = new Handler();
fetchAccessToken();
}
@Override
public void onDestroy() {
super.onDestroy();
mHandler.removeCallbacks(mFetchAccessTokenRunnable);
mHandler = null;
// Release the gRPC channel.
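// Note: onDestroy() runs on the main thread, and awaitTermination() below
// can block it for up to one second while the channel shuts down.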
if (mApi != null) {
final ManagedChannel channel = (ManagedChannel) mApi.getChannel();
if (channel != null && !channel.isShutdown()) {
try {
channel.shutdown().awaitTermination(1, TimeUnit.SECONDS);
} catch (InterruptedException e) {
Log.e(TAG, "Error shutting down the gRPC channel.", e);
}
}
mApi = null;
}
}
private void fetchAccessToken() {
if (mAccessTokenTask != null) {
return;
}
mAccessTokenTask = new AccessTokenTask();
mAccessTokenTask.execute();
}
private String getDefaultLanguageCode() {
final Locale locale = Locale.getDefault();
final StringBuilder language = new StringBuilder(locale.getLanguage());
final String country = locale.getCountry();
if (!TextUtils.isEmpty(country)) {
language.append("-");
language.append(country);
}
return language.toString();
}
@Nullable
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
public void addListener(@NonNull Listener listener) {
mListeners.add(listener);
}
public void removeListener(@NonNull Listener listener) {
mListeners.remove(listener);
}
/**
* Starts recognizing speech audio.
*
* @param sampleRate The sample rate of the audio.
*/
public void startRecognizing(int sampleRate) {
if (mApi == null) {
Log.w(TAG, "API not ready. Ignoring the request.");
return;
}
// Configure the API
mRequestObserver = mApi.streamingRecognize(mResponseObserver);
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setStreamingConfig(StreamingRecognitionConfig.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setLanguageCode(getDefaultLanguageCode())
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setSampleRateHertz(sampleRate)
.build())
.setInterimResults(true)
.setSingleUtterance(true)
.build())
.build());
}
/**
* Recognizes the speech audio. This method should be called every time a chunk of byte buffer
* is ready.
*
* @param data The audio data.
* @param size The number of elements that are actually relevant in the {@code data}.
*/
public void recognize(byte[] data, int size) {
if (mRequestObserver == null) {
return;
}
// Call the streaming recognition API
mRequestObserver.onNext(StreamingRecognizeRequest.newBuilder()
.setAudioContent(ByteString.copyFrom(data, 0, size))
.build());
}
/**
* Finishes recognizing speech audio.
*/
public void finishRecognizing() {
if (mRequestObserver == null) {
return;
}
mRequestObserver.onCompleted();
mRequestObserver = null;
}
/**
* Recognize all data from the specified {@link InputStream}.
*
* @param stream The audio data.
*/
public void recognizeInputStream(InputStream stream) {
try {
mApi.recognize(
RecognizeRequest.newBuilder()
.setConfig(RecognitionConfig.newBuilder()
.setEncoding(RecognitionConfig.AudioEncoding.LINEAR16)
.setLanguageCode("en-US")
.setSampleRateHertz(16000)
.build())
.setAudio(RecognitionAudio.newBuilder()
.setContent(ByteString.readFrom(stream))
.build())
.build(),
mFileResponseObserver);
} catch (IOException e) {
Log.e(TAG, "Error loading the input", e);
}
}
private class SpeechBinder extends Binder {
SpeechService getService() {
return SpeechService.this;
}
}
private final Runnable mFetchAccessTokenRunnable = new Runnable() {
@Override
public void run() {
fetchAccessToken();
}
};
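// Fetches (or refreshes) the access token off the main thread;
// onPostExecute() then builds the gRPC channel back on the main thread.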
private class AccessTokenTask extends AsyncTask<Void, Void, AccessToken> {
@Override
protected AccessToken doInBackground(Void... voids) {
final SharedPreferences prefs =
getSharedPreferences(PREFS, Context.MODE_PRIVATE);
String tokenValue = prefs.getString(PREF_ACCESS_TOKEN_VALUE, null);
long expirationTime = prefs.getLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME, -1);
// Check if the current token is still valid for a while
if (tokenValue != null && expirationTime > 0) {
if (expirationTime
> System.currentTimeMillis() + ACCESS_TOKEN_EXPIRATION_TOLERANCE) {
return new AccessToken(tokenValue, new Date(expirationTime));
}
}
// ***** WARNING *****
// In this sample, we load the credential from a JSON file stored in a raw resource
// folder of this client app. You should never do this in your app. Instead, store
// the file in your server and obtain an access token from there.
// *******************
final InputStream stream = getResources().openRawResource(R.raw.credential);
try {
final GoogleCredentials credentials = GoogleCredentials.fromStream(stream)
.createScoped(SCOPE);
final AccessToken token = credentials.refreshAccessToken();
prefs.edit()
.putString(PREF_ACCESS_TOKEN_VALUE, token.getTokenValue())
.putLong(PREF_ACCESS_TOKEN_EXPIRATION_TIME,
token.getExpirationTime().getTime())
.apply();
return token;
} catch (IOException e) {
Log.e(TAG, "Failed to obtain access token.", e);
}
return null;
}
@Override
protected void onPostExecute(AccessToken accessToken) {
mAccessTokenTask = null;
final ManagedChannel channel = new OkHttpChannelProvider()
.builderForAddress(HOSTNAME, PORT)
.nameResolverFactory(new DnsNameResolverProvider())
.intercept(new GoogleCredentialsInterceptor(new GoogleCredentials(accessToken)
.createScoped(SCOPE)))
.build();
mApi = SpeechGrpc.newStub(channel);
// Schedule access token refresh before it expires
if (mHandler != null) {
mHandler.postDelayed(mFetchAccessTokenRunnable,
Math.max(accessToken.getExpirationTime().getTime()
- System.currentTimeMillis()
- ACCESS_TOKEN_FETCH_MARGIN, ACCESS_TOKEN_EXPIRATION_TOLERANCE));
}
}
}
/**
* Authenticates the gRPC channel using the specified {@link GoogleCredentials}.
*/
private static class GoogleCredentialsInterceptor implements ClientInterceptor {
private final Credentials mCredentials;
private Metadata mCached;
private Map<String, List<String>> mLastMetadata;
GoogleCredentialsInterceptor(Credentials credentials) {
mCredentials = credentials;
}
@Override
public <ReqT, RespT> ClientCall<ReqT, RespT> interceptCall(
final MethodDescriptor<ReqT, RespT> method, CallOptions callOptions,
final Channel next) {
return new ClientInterceptors.CheckedForwardingClientCall<ReqT, RespT>(
next.newCall(method, callOptions)) {
@Override
protected void checkedStart(Listener<RespT> responseListener, Metadata headers)
throws StatusException {
Metadata cachedSaved;
URI uri = serviceUri(next, method);
synchronized (this) {
Map<String, List<String>> latestMetadata = getRequestMetadata(uri);
if (mLastMetadata == null || mLastMetadata != latestMetadata) {
mLastMetadata = latestMetadata;
mCached = toHeaders(mLastMetadata);
}
cachedSaved = mCached;
}
headers.merge(cachedSaved);
delegate().start(responseListener, headers);
}
};
}
/**
* Generate a JWT-specific service URI. The URI is simply an identifier with enough
* information for a service to know that the JWT was intended for it. The URI will
* commonly be verified with a simple string equality check.
*/
private URI serviceUri(Channel channel, MethodDescriptor<?, ?> method)
throws StatusException {
String authority = channel.authority();
if (authority == null) {
throw Status.UNAUTHENTICATED
.withDescription("Channel has no authority")
.asException();
}
// Always use HTTPS, by definition.
final String scheme = "https";
final int defaultPort = 443;
String path = "/" + MethodDescriptor.extractFullServiceName(method.getFullMethodName());
URI uri;
try {
uri = new URI(scheme, authority, path, null, null);
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI for auth")
.withCause(e).asException();
}
// The default port must not be present. Alternative ports should be present.
if (uri.getPort() == defaultPort) {
uri = removePort(uri);
}
return uri;
}
private URI removePort(URI uri) throws StatusException {
try {
return new URI(uri.getScheme(), uri.getUserInfo(), uri.getHost(), -1 /* port */,
uri.getPath(), uri.getQuery(), uri.getFragment());
} catch (URISyntaxException e) {
throw Status.UNAUTHENTICATED
.withDescription("Unable to construct service URI after removing port")
.withCause(e).asException();
}
}
private Map<String, List<String>> getRequestMetadata(URI uri) throws StatusException {
try {
return mCredentials.getRequestMetadata(uri);
} catch (IOException e) {
throw Status.UNAUTHENTICATED.withCause(e).asException();
}
}
private static Metadata toHeaders(Map<String, List<String>> metadata) {
Metadata headers = new Metadata();
if (metadata != null) {
for (String key : metadata.keySet()) {
Metadata.Key<String> headerKey = Metadata.Key.of(
key, Metadata.ASCII_STRING_MARSHALLER);
for (String value : metadata.get(key)) {
headers.put(headerKey, value);
}
}
}
return headers;
}
}
}
And here is my activity class:
public class MainActivity extends AppCompatActivity implements MessageDialogFragment.Listener {
private static final String FRAGMENT_MESSAGE_DIALOG = "message_dialog";
private static final String STATE_RESULTS = "results";
private static final int REQUEST_RECORD_AUDIO_PERMISSION = 1;
private SpeechService mSpeechService;
private VoiceRecorder mVoiceRecorder;
private final VoiceRecorder.Callback mVoiceCallback = new VoiceRecorder.Callback() {
@Override
public void onVoiceStart() {
showStatus(true);
if (mSpeechService != null) {
mSpeechService.startRecognizing(mVoiceRecorder.getSampleRate());
}
}
@Override
public void onVoice(byte[] data, int size) {
if (mSpeechService != null) {
mSpeechService.recognize(data, size);
}
}
@Override
public void onVoiceEnd() {
showStatus(false);
if (mSpeechService != null) {
mSpeechService.finishRecognizing();
}
}
};
// Resource caches
private int mColorHearing;
private int mColorNotHearing;
// View references
private TextView mStatus;
private TextView mText, mResult;
private Button editButton, clearButton;
private SharedPreferences settings;
private final ServiceConnection mServiceConnection = new ServiceConnection() {
@Override
public void onServiceConnected(ComponentName componentName, IBinder binder) {
mSpeechService = SpeechService.from(binder);
mSpeechService.addListener(mSpeechServiceListener);
mStatus.setVisibility(View.VISIBLE);
}
@Override
public void onServiceDisconnected(ComponentName componentName) {
mSpeechService = null;
}
};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
getSupportActionBar().setDisplayShowHomeEnabled(true);
final Resources resources = getResources();
final Resources.Theme theme = getTheme();
mColorHearing = ResourcesCompat.getColor(resources, R.color.status_hearing, theme);
mColorNotHearing = ResourcesCompat.getColor(resources, R.color.status_not_hearing, theme);
mStatus = (TextView) findViewById(R.id.status);
mText = (TextView) findViewById(R.id.text);
mResult = (TextView) findViewById(R.id.resultText);
editButton = (Button)findViewById(R.id.button1);
clearButton = (Button)findViewById(R.id.button2);
settings = getSharedPreferences("MyPreference", Context.MODE_PRIVATE);
clearButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Sounds sounds = new Sounds(getApplicationContext());
if (settings.getBoolean("muteAble", false)) {
sounds.playSound();
}
mResult.setText("");
}
});
editButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Sounds sounds = new Sounds(getApplicationContext());
if (settings.getBoolean("muteAble", false)) {
sounds.playSound();
}
Intent editIntent = new Intent(MainActivity.this, EditorActivity.class);
String forEditText = mResult.getText().toString();
editIntent.putExtra("forEdit", forEditText);
startActivity(editIntent);
}
});
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
if(id == android.R.id.home){
this.finish();
}
return super.onOptionsItemSelected(item);
}
@Override
protected void onStart() {
super.onStart();
// Prepare Cloud Speech API
bindService(new Intent(this, SpeechService.class), mServiceConnection, BIND_AUTO_CREATE);
// Start listening to voices
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.RECORD_AUDIO)
== PackageManager.PERMISSION_GRANTED) {
startVoiceRecorder();
} else if (ActivityCompat.shouldShowRequestPermissionRationale(this,
Manifest.permission.RECORD_AUDIO)) {
showPermissionMessageDialog();
} else {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
REQUEST_RECORD_AUDIO_PERMISSION);
}
}
@Override
protected void onStop() {
// Stop listening to voice
stopVoiceRecorder();
// Stop Cloud Speech API
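// (Assumes the connection succeeded; mSpeechService would still be null here
// if onServiceConnected() was never called.)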
mSpeechService.removeListener(mSpeechServiceListener);
unbindService(mServiceConnection);
mSpeechService = null;
super.onStop();
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode == REQUEST_RECORD_AUDIO_PERMISSION) {
if (permissions.length == 1 && grantResults.length == 1
&& grantResults[0] == PackageManager.PERMISSION_GRANTED) {
startVoiceRecorder();
} else {
showPermissionMessageDialog();
}
} else {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
}
}
private void startVoiceRecorder() {
if (mVoiceRecorder != null) {
mVoiceRecorder.stop();
}
mVoiceRecorder = new VoiceRecorder(mVoiceCallback);
mVoiceRecorder.start();
}
private void stopVoiceRecorder() {
if (mVoiceRecorder != null) {
mVoiceRecorder.stop();
mVoiceRecorder = null;
}
}
private void showPermissionMessageDialog() {
MessageDialogFragment
.newInstance(getString(R.string.permission_message))
.show(getSupportFragmentManager(), FRAGMENT_MESSAGE_DIALOG);
}
private void showStatus(final boolean hearingVoice) {
runOnUiThread(new Runnable() {
@Override
public void run() {
mStatus.setTextColor(hearingVoice ? mColorHearing : mColorNotHearing);
}
});
}
@Override
public void onMessageDialogDismissed() {
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.RECORD_AUDIO},
REQUEST_RECORD_AUDIO_PERMISSION);
}
private final SpeechService.Listener mSpeechServiceListener =
new SpeechService.Listener() {
@Override
public void onSpeechRecognized(final String text, final boolean isFinal) {
if (isFinal) {
mVoiceRecorder.dismiss();
}
if (mText != null && !TextUtils.isEmpty(text)) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (isFinal) {
mText.setText(null);
mResult.append(" " + text);
} else {
mText.setText(text);
}
}
});
}
}
};
}
Thanks in advance for any help.