app/src/main/java/com/example/jarvis_stts/MainActivity.kt aktualisiert

This commit is contained in:
2026-03-11 16:26:02 +00:00
parent 5a5709de61
commit e86a2eb48c

View File

@@ -36,6 +36,7 @@ class MainActivity : AppCompatActivity(), RecognitionListener, TextToSpeech.OnIn
     private var voskModel: Model? = null
     private val client = OkHttpClient()
     private var webSocket: WebSocket? = null
+    private var isInteracting = false
     // TTS Stimmen
     private var availableVoices = mutableListOf<Voice>()
@@ -53,6 +54,7 @@ class MainActivity : AppCompatActivity(), RecognitionListener, TextToSpeech.OnIn
         }
         // Vosk wieder starten, nachdem Google fertig ist
+        isInteracting = false
         startVosk()
     }
@@ -141,14 +143,16 @@ class MainActivity : AppCompatActivity(), RecognitionListener, TextToSpeech.OnIn
     // --- Vosk RecognitionListener ---
     override fun onPartialResult(hypothesis: String) {
-        val recognizedText = extractText(hypothesis)
-        Log.d("JARVIS", "Vosk hört: $recognizedText")
-        // Wake-Word Check
-        if (recognizedText.contains("computer", true) || recognizedText.contains("jarvis", true)) {
-            Log.d("JARVIS", "Wake-Word erkannt!")
-            voskService?.stop() // Stoppen, um Mikrofon für Google freizugeben
+        if (isInteracting) return // Wenn wir schon dabei sind, ignoriere weiteres
+        val recognizedText = extractText(hypothesis)
+        if (recognizedText.contains("jarvis", true)) {
+            isInteracting = true // Sperre setzen
+            voskService?.stop()
+            tvStatus.postDelayed({
                 startVoiceInput()
+            }, 500)
         }
     }
@@ -170,13 +174,14 @@ class MainActivity : AppCompatActivity(), RecognitionListener, TextToSpeech.OnIn
         val intent = Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH).apply {
             putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL, RecognizerIntent.LANGUAGE_MODEL_FREE_FORM)
             putExtra(RecognizerIntent.EXTRA_LANGUAGE, "de-DE")
-            putExtra(RecognizerIntent.EXTRA_PROMPT, "Ich höre...")
+            putExtra(RecognizerIntent.EXTRA_PROMPT, "Ich höre dir zu...")
+            // Diese Zeile hilft, dass Google nicht zu früh abbricht:
+            putExtra(RecognizerIntent.EXTRA_SPEECH_INPUT_MINIMUM_MILLIS, 2000L)
         }
         try {
             speechRecognizerLauncher.launch(intent)
         } catch (e: Exception) {
-            Toast.makeText(this, "Google Spracheingabe nicht verfügbar", Toast.LENGTH_SHORT).show()
-            startVosk() // Falls Google scheitert, Vosk wieder an
+            startVosk()
         }
     }