Commit 8adc0e1
test stream realtime
1 parent: abb87ed

File tree: 4 files changed, +79 -3 lines changed

firebase-ai/app/src/main/AndroidManifest.xml
Lines changed: 3 additions & 0 deletions

@@ -2,6 +2,9 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     xmlns:tools="http://schemas.android.com/tools">
 
+    <!-- Needed for Firebase AI Logic Bidirectional streaming (Live API) -->
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+
     <application
         android:allowBackup="true"
         android:dataExtractionRules="@xml/data_extraction_rules"
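
Declaring RECORD_AUDIO in the manifest is only half of the story: on Android 6.0 (API 23) and above it is a dangerous permission that must also be granted at runtime before the Live API can capture microphone audio (the ViewModel below suppresses the MissingPermission lint check rather than requesting it). A minimal sketch of a runtime request from Compose, assuming the androidx.activity permission launcher; the helper name and button text are illustrative and not part of this commit:

import android.Manifest
import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.material3.Button
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable

// Hypothetical helper, not part of this commit: requests RECORD_AUDIO at
// runtime and only invokes onGranted once the user has allowed it.
@Composable
fun RecordAudioPermissionButton(onGranted: () -> Unit) {
    val launcher = rememberLauncherForActivityResult(
        ActivityResultContracts.RequestPermission()
    ) { granted ->
        if (granted) onGranted()
    }
    Button(onClick = { launcher.launch(Manifest.permission.RECORD_AUDIO) }) {
        Text("Enable microphone")
    }
}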

firebase-ai/app/src/main/java/com/google/firebase/quickstart/ai/FirebaseAISamples.kt
Lines changed: 7 additions & 0 deletions

@@ -209,5 +209,12 @@ val FIREBASE_AI_SAMPLES = listOf(
                 " anything important which people say in the video."
             )
         }
+    ),
+    Sample(
+        title = "Live API",
+        description = "Bidirectional streaming",
+        navRoute = "stream",
+        categories = listOf(Category.AUDIO),
+        backend = GenerativeBackend.vertexAI()
     )
 )
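
The new entry leans on several named parameters of the quickstart's Sample model, which is defined elsewhere in the repository. Purely for orientation, a hypothetical shape consistent with the fields used in this commit and in the ViewModel below (field names, defaults, and the Category values are assumptions, not the repo's actual definition):

import com.google.firebase.ai.type.Content
import com.google.firebase.ai.type.GenerativeBackend
import java.util.UUID

// Hypothetical stand-ins for the quickstart's own types, inferred from the
// fields this commit uses; the real definitions in the repo may differ.
enum class Category { TEXT, IMAGE, VIDEO, AUDIO, DOCUMENT }

data class Sample(
    val id: String = UUID.randomUUID().toString(),
    val title: String,
    val description: String,
    val navRoute: String,
    val categories: List<Category>,
    val backend: GenerativeBackend = GenerativeBackend.googleAI(),
    val modelName: String? = null,
    val systemInstructions: Content? = null
)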
Lines changed: 17 additions & 3 deletions

@@ -1,20 +1,34 @@
 package com.google.firebase.quickstart.ai.feature.live
 
 import androidx.compose.foundation.layout.Box
+import androidx.compose.foundation.layout.Row
 import androidx.compose.foundation.layout.fillMaxSize
+import androidx.compose.material3.Button
 import androidx.compose.material3.Text
 import androidx.compose.runtime.Composable
 import androidx.compose.ui.Modifier
+import androidx.lifecycle.viewmodel.compose.viewModel
 import kotlinx.serialization.Serializable
 
 @Serializable
 class StreamRealtimeRoute(val sampleId: String)
 
 @Composable
-fun StreamRealtimeScreen() {
-    Box(
+fun StreamRealtimeScreen(
+    streamViewModel: StreamRealtimeViewModel = viewModel<StreamRealtimeViewModel>()
+) {
+    Row(
         modifier = Modifier.fillMaxSize()
     ) {
-        Text("Coming soon")
+        Button(onClick = {
+            streamViewModel.connect()
+        }) {
+            Text("Connect")
+        }
+        Button(onClick = {
+            streamViewModel.start()
+        }) {
+            Text("Start")
+        }
     }
 }
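
StreamRealtimeRoute is a @Serializable class and the screen obtains its ViewModel via viewModel(), so the quickstart presumably exposes this destination through Navigation Compose's type-safe builder. A minimal sketch of such wiring, assuming navigation-compose 2.8 or newer; the host function and the choice of start destination are illustrative only, not part of this commit:

import androidx.compose.runtime.Composable
import androidx.navigation.compose.NavHost
import androidx.navigation.compose.composable
import androidx.navigation.compose.rememberNavController

// Hypothetical navigation wiring: registers the @Serializable
// StreamRealtimeRoute as a type-safe destination.
@Composable
fun QuickstartNavHost(sampleId: String) {
    val navController = rememberNavController()
    NavHost(
        navController = navController,
        startDestination = StreamRealtimeRoute(sampleId)
    ) {
        // The sampleId argument is serialized into the route and read back in
        // StreamRealtimeViewModel via SavedStateHandle.toRoute<StreamRealtimeRoute>().
        composable<StreamRealtimeRoute> {
            StreamRealtimeScreen()
        }
    }
}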
Lines changed: 52 additions & 0 deletions

@@ -0,0 +1,52 @@
+package com.google.firebase.quickstart.ai.feature.live
+
+import android.annotation.SuppressLint
+import androidx.lifecycle.SavedStateHandle
+import androidx.lifecycle.ViewModel
+import androidx.lifecycle.viewModelScope
+import androidx.navigation.toRoute
+import com.google.firebase.Firebase
+import com.google.firebase.ai.ai
+import com.google.firebase.ai.type.LiveSession
+import com.google.firebase.ai.type.PublicPreviewAPI
+import com.google.firebase.ai.type.ResponseModality
+import com.google.firebase.ai.type.SpeechConfig
+import com.google.firebase.ai.type.Voice
+import com.google.firebase.ai.type.liveGenerationConfig
+import com.google.firebase.quickstart.ai.FIREBASE_AI_SAMPLES
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.launch
+
+@SuppressLint("MissingPermission")
+@OptIn(PublicPreviewAPI::class)
+class StreamRealtimeViewModel(
+    savedStateHandle: SavedStateHandle
+) : ViewModel() {
+    private val sampleId = savedStateHandle.toRoute<StreamRealtimeRoute>().sampleId
+    private val sample = FIREBASE_AI_SAMPLES.first { it.id == sampleId }
+
+    private lateinit var session: LiveSession
+
+    fun connect() {
+        val generativeModel = Firebase.ai(
+            backend = sample.backend // GenerativeBackend.googleAI() by default
+        ).liveModel(
+            modelName = sample.modelName ?: "gemini-2.0-flash-live-preview-04-09",
+            systemInstruction = sample.systemInstructions,
+            generationConfig = liveGenerationConfig {
+                responseModality = ResponseModality.AUDIO
+                speechConfig = SpeechConfig(voice = Voice("ZEPHYR"))
+            }
+        )
+        viewModelScope.launch {
+            session = generativeModel.connect()
+            session.startAudioConversation()
+        }
+    }
+
+    fun start() {
+        viewModelScope.launch {
+            session.startAudioConversation()
+        }
+    }
+}
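
In this ViewModel the Live API flow is: build a live model with Firebase.ai(backend).liveModel(...), open a session with connect(), then drive full-duplex audio with startAudioConversation(). Note that connect() already starts the audio conversation, so the separate Start button retriggers it on the existing session, and nothing in this commit stops it. A minimal sketch of a complementary stop() that could sit alongside start() in this class, assuming LiveSession.stopAudioConversation() keeps its current non-suspending signature (the isInitialized guard is illustrative):

fun stop() {
    // Guard against tapping Stop before Connect has created the session.
    if (::session.isInitialized) {
        session.stopAudioConversation()
    }
}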
