Android Studio: error when using two background services

Problem description

I am developing an app that uses two background services: one for speech recognition and one for chat. Both run in the background and work fine while the app is open, but the app crashes when I close it. I thought the error might be related to the "user info", so I passed a plain string for it, but nothing changed. Why am I getting this error? I apologize in advance for how long the code is; I don't yet have a full picture of all of it. Sorry...
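For context, the post does not show how the two services are declared or started. Below is a minimal sketch of the assumed setup; the MainActivity class, the layout name, the manifest entries, and the use of startService() are assumptions added for illustration, not taken from the original code.

// Assumed AndroidManifest.xml entries (not shown in the post):
//   <service android:name=".Recognition">
//       <intent-filter>
//           <action android:name="android.speech.RecognitionService" />
//       </intent-filter>
//   </service>
//   <service android:name=".ChatService" />

import android.content.Intent;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;

public class MainActivity extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Start both services. Plain started services like these run inside the
        // app's own process, so they stop when that process exits.
        startService(new Intent(this, Recognition.class));
        startService(new Intent(this, ChatService.class));
    }
}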

Recognition.java

public class Recognition extends RecognitionService {

public static final int MSG_VOICE_RECO_READY = 0;
public static final int MSG_VOICE_RECO_END = 1;
public static final int MSG_VOICE_RECO_RESTART = 2;

private static SpeechRecognizer mSrRecognizer = null;
private TextToSpeech tts;
private String userID = "abc123";
boolean mBoolVoiceRecoStarted;
private boolean[] timelist = new boolean[8];


@Override
public void onCreate() {
    super.onCreate();
    startListening();
}

@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    if (intent == null) {
        return Service.START_STICKY;
    }
    return super.onStartCommand(intent, flags, startId);
}

@Override
public void onDestroy() {
    super.onDestroy();
    Log.d("service", "destroy");
}

@Override
protected void onStartListening(Intent recognizerIntent, Callback listener) {
}

private Handler mHdrVoiceRecoState = new Handler() {
    @Override
    public void handleMessage(Message msg) {

        switch (msg.what) {
            case MSG_VOICE_RECO_READY:
                break;
            case MSG_VOICE_RECO_END: {
                stopListening();
                sendEmptyMessageDelayed(MSG_VOICE_RECO_RESTART, 1500);
                break;
            }
            case MSG_VOICE_RECO_RESTART:
                startListening();
                break;
            default:
                super.handleMessage(msg);
        }

    }
};

public void startListening(){
    if (tts == null){
        tts = new TextToSpeech(this, new TextToSpeech.OnInitListener() {
            @Override
            public void onInit(int status) {
                if (status != TextToSpeech.ERROR) {
                    tts.setLanguage(Locale.KOREAN);
                }
            }
        });
    }

    if (!mBoolVoiceRecoStarted)
    {
        if (mSrRecognizer == null)
        {
            mSrRecognizer = SpeechRecognizer.createSpeechRecognizer(getApplicationContext());
            mSrRecognizer.setRecognitionListener(mClsRecoListener);
        }
        if (SpeechRecognizer.isRecognitionAvailable(getApplicationContext())){
            Intent itIntent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
            itIntent.putExtra(RecognizerIntent.EXTRA_PARTIAL_RESULTS, true);
            itIntent.putExtra(RecognizerIntent.EXTRA_CALLING_PACKAGE, this.getPackageName());
            itIntent.putExtra(RecognizerIntent.EXTRA_LANGUAGE, "ko-KR");
            itIntent.putExtra(RecognizerIntent.EXTRA_MAX_RESULTS, 50);
            mSrRecognizer.startListening(itIntent);
        }
    }
    mBoolVoiceRecoStarted = true;
}

public void stopListening() 
{
    try {
        if (mSrRecognizer != null && mBoolVoiceRecoStarted)
        {
            mSrRecognizer.stopListening(); 
            mSrRecognizer.destroy();
            mSrRecognizer = null;
        }

    } catch (Exception ex) {
        ex.printStackTrace();
    }

    mBoolVoiceRecoStarted = false; 
}



@Override
protected void onCancel(Callback listener) {
    if (mSrRecognizer != null) {
        mSrRecognizer.cancel();
    }
}

@Override
protected void onStopListening(Callback listener) {

}

private RecognitionListener mClsRecoListener = new RecognitionListener() { 
    @Override
    public void onRmsChanged(float rmsdB){
    } 
    @Override
    public void onResults(Bundle results) { 
        mHdrVoiceRecoState.sendEmptyMessage(MSG_VOICE_RECO_END); // 1
        ArrayList<String> mResult = results.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        String str = mResult.get(0);
        
        if (str.compareTo("promise") == 0) {
            stopListening();
        }
    }

    private void send(){
        Intent intent = new Intent("abc");
        intent.putExtra("msg","1");
        LocalBroadcastManager.getInstance(getApplicationContext()).sendBroadcast(intent);
    }

    @Override
    public void onReadyForSpeech(Bundle params) {
    }

    @Override
    public void onEndOfSpeech() {
    }

    @Override
    public void onError(int intError) {
        mHdrVoiceRecoState.sendEmptyMessage(MSG_VOICE_RECO_END); // 1
        String message="";
        switch (intError) {
            case SpeechRecognizer.ERROR_AUDIO:
                message = "오디오 에러";
                break;
            case SpeechRecognizer.ERROR_CLIENT:
                message = "클라이언트 에러";
                break;
            case SpeechRecognizer.ERROR_INSUFFICIENT_PERMISSIONS:
                message = "퍼미션 없음";
                break;
            case SpeechRecognizer.ERROR_NETWORK:
                message = "네트워크 에러";
                break;
            case SpeechRecognizer.ERROR_NETWORK_TIMEOUT:
                message = "네트웍 타임아웃";
                break;
            case SpeechRecognizer.ERROR_NO_MATCH:
                message = "찾을 수 없음";
                break;
            case SpeechRecognizer.ERROR_RECOGNIZER_BUSY:
                message = "RECOGNIZER가 바쁨";
                break;
            case SpeechRecognizer.ERROR_SERVER:
                message = "서버가 이상함";
                break;
            case SpeechRecognizer.ERROR_SPEECH_TIMEOUT:
                message = "말하는 시간초과";
                break;
            default:
                message = "알 수 없는 오류임";
                break;
        }
    }

    @Override
    public void onBeginningOfSpeech() {
    }

    @Override
    public void onBufferReceived(byte[] buffer) {
    }

    @Override
    public void onEvent(int eventType, Bundle params) {
    }

    @Override
    public void onPartialResults(Bundle partialResults) {

        ArrayList<String> data =
                partialResults.getStringArrayList(SpeechRecognizer.RESULTS_RECOGNITION);
        ArrayList<String> unstableData =
                partialResults.getStringArrayList("android.speech.extra.UNSTABLE_TEXT");
        String mResult = data.get(0) + unstableData.get(0);
    }
};

public void readText(String str) {
    tts.setPitch(1.0f);
    tts.setSpeechRate(1.0f);
    tts.speak(str, TextToSpeech.QUEUE_FLUSH, null);
}
} // end of Recognition class

ChatService.java

public class ChatService extends Service {
    private final IBinder mBinder = new LocalBinder();
    private ArrayList<ChatData> mList;
    public static int last_number;
    private ChatReceiveRequest chatReceiveRequest;
    private RequestQueue requestQueue;
    private static final int MSG_CHAT_RECEIVE_READY = 0;
    private static final int MSG_CHAT_RECEIVE_END = 1;
    private static final int MSG_CHAT_RECEIVE_RESTART = 2;
    private String userID = "abc123";
    private int cmp=0;
    private Response.Listener<String> responseListener1 = new Response.Listener<String>() {
        @Override
        public void onResponse(String response) {
            if (response == null) return;
            try {
                JSONArray jsonArray = new JSONArray(response);
                JSONObject jsonObject = jsonArray.getJSONObject(0);
                boolean success = jsonObject.getBoolean("success");
                if (success) {
                    last_number = jsonObject.getInt("number");
                    String msg = jsonObject.getString("msg");
                    String date = jsonObject.getString("date");
                    String sender = jsonObject.getString("sender");
                    String receiver = jsonObject.getString("receiver");

                    if (!sender.equals(userID) && cmp != last_number) {
                        mList.add(new ChatData(msg, date, sender, receiver));
                    }
                    cmp = last_number;
                }
            } catch (JSONException jsonException) {
                jsonException.printStackTrace();
            }
        }
    };

    public class LocalBinder extends Binder{
        public ChatService getService() {
            return ChatService.this;
        }
    }

    @Override
    public IBinder onBind(Intent intent) {
        return mBinder;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        Response.Listener<String> responseListener2 = new Response.Listener<String>() {
            @Override
            public void onResponse(String response) {
                System.out.println("response : " + response);
                try {
                    mList = new ArrayList<>();
                    JSONArray jsonArray = new JSONArray(response);
                    int i = 0;
                    while (i < jsonArray.length()) {
                        JSONObject jsonObject = jsonArray.getJSONObject(i);
                        boolean success = jsonObject.getBoolean("success");
                        if (success) {
                            last_number = jsonObject.getInt("number"); 
                            String msg = jsonObject.getString("msg");
                            String date = jsonObject.getString("date");
                            String sender = jsonObject.getString("sender");
                            String receiver = jsonObject.getString("receiver");

                            mList.add(new ChatData(msg,date,sender,receiver));
                        }
                        i++;
                    }
                    startListening();
                } catch (JSONException e) {
                    e.printStackTrace();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        chatReceiveRequest = new ChatReceiveRequest(0, userID, responseListener2);
        requestQueue = Volley.newRequestQueue(getApplicationContext());
        requestQueue.add(chatReceiveRequest);
    }
    private Handler handler = new Handler()
    {
        @Override
        public void handleMessage(Message msg) {
            switch (msg.what)
            {
                case MSG_CHAT_RECEIVE_READY:
                    break;
                case MSG_CHAT_RECEIVE_END: {
                    stopListening();
                    sendEmptyMessageDelayed(MSG_CHAT_RECEIVE_RESTART, 1000);
                    break;
                }
                case MSG_CHAT_RECEIVE_RESTART:
                    startListening();
                    break;
                default:
                    super.handleMessage(msg);
            }
        }
    };

    private void stopListening() {
    }

    private void startListening() {
        // Ask the handler to schedule the next poll, then request any messages newer than last_number.
        handler.sendEmptyMessage(MSG_CHAT_RECEIVE_END);
        chatReceiveRequest = new ChatReceiveRequest(last_number, userID, responseListener1);
        requestQueue = Volley.newRequestQueue(getApplicationContext());
        requestQueue.add(chatReceiveRequest);
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (intent == null) {
            return Service.START_STICKY;
        }
        return super.onStartCommand(intent, flags, startId);
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }


    public ArrayList<ChatData> returnChatList() {
        return mList;
    }
}

Error

e.android.googlequicksearchbox E/native: incremental_result.cc:197 SpeechErrorSpace::SpeechError(-73542): Result lattice is not set.
2021-04-06 14:32:18.137 4284-4303/? D/AF::Track: interceptBuffer: took 802us to intercept 0 tracks
2021-04-06 14:32:18.146 2022-3299/system_process D/WificondControl: Scan result ready event
2021-04-06 14:32:18.150 2185-2185/com.android.systemui I/KeyButtonView: Back button event: ACTION_UP
2021-04-06 14:32:18.169 2022-2052/system_process D/AutofillManagerService: onBackKeyPressed()
2021-04-06 14:32:18.175 2022-2130/system_process V/InputDispatcher: Asynchronous input event injection succeeded.
2021-04-06 14:32:18.207 9314-12623/com.google.android.googlequicksearchbox W/earchbox:searc: Long monitor contention with owner UserFacing430 (15991) at boolean android.os.BinderProxy.transactNative(int, android.os.Parcel, android.os.Parcel, int)(BinderProxy.java:-2) waiters=0 in void com.google.android.apps.gsa.speech.m.a.h.a(java.lang.Object) for 1.437s
2021-04-06 14:32:18.242 2022-3299/system_process W/InputReader: Device has associated, but no associated display id.
2021-04-06 14:32:18.243 2022-3299/system_process I/chatty: uid=1000(system) Binder:2022_13 identical 8 lines
2021-04-06 14:32:18.243 2022-3299/system_process W/InputReader: Device has associated, but no associated display id.
2021-04-06 14:32:18.346 15884-15884/com.example.main I/om.example.mai: System.exit called, status: 0
2021-04-06 14:32:18.346 15884-15884/com.example.main I/AndroidRuntime: VM exiting with result code 0, cleanup skipped.
2021-04-06 14:32:18.390 9314-15991/com.google.android.googlequicksearchbox I/AudioController: internalShutdown
2021-04-06 14:32:18.391 9314-15991/com.google.android.googlequicksearchbox E/AudioSource: Stop listening is called on already closed AudioSource
2021-04-06 14:32:18.540 2022-2130/system_process W/InputDispatcher: channel '3e4be8c com.example.main/com.example.main.MainActivity (server)' ~ Consumer closed input channel or an error occurred.  events=0x9
2021-04-06 14:32:18.541 2022-2130/system_process E/InputDispatcher: channel '3e4be8c com.example.main/com.example.main.MainActivity (server)' ~ Channel is unrecoverably broken and will be disposed!

Tags: java, android, android-studio
