Compare commits

...

24 Commits

Author SHA1 Message Date
Adam Gastineau
e0bf9756fc Make sure LLM client preserves Content-Type 2025-12-03 15:44:38 -08:00
Adam Gastineau
3759fd7cd5 Separate out empty transcription STT error 2025-11-27 09:21:36 -08:00
Adam Gastineau
a8719a2140 Immediately start listening when touchpad tapped, removing start delay 2025-11-27 07:09:56 -08:00
Adam Gastineau
d7a243a179 Drop OAI plugin from CI 2025-11-26 20:07:10 -08:00
Adam Gastineau
7be80e4230 Optionally use native Gemini client with Google Search 2025-11-26 19:50:25 -08:00
Adam Gastineau
1aa887e0f6 Migrated LangchainLlmService into main APK 2025-11-24 08:50:57 -08:00
Adam Gastineau
0809142deb Begin transition from openai-kotlin to langchain4j 2025-11-24 06:48:22 -08:00
Adam Gastineau
033bb7e5ae Fix tool calls not following the same conversation loop 2025-11-18 08:13:43 -08:00
Adam Gastineau
61b1dfdcae Fix OpenAI not providing proper error messages 2025-11-17 21:25:04 -08:00
Adam Gastineau
1218669f9b Fix simulator type 2025-10-23 11:41:24 -07:00
Adam Gastineau
15ce36b417 Bump Moonlight commit 2025-10-23 10:40:20 -07:00
Adam Gastineau
1f20d28c29 Lazy load conversation and message rows 2025-10-23 07:51:39 -07:00
Adam Gastineau
96b5e8d9be Added indexes to improve conversations view performance 2025-10-23 06:44:13 -07:00
Adam Gastineau
e407dca2fa Working IP address retrieval 2025-10-23 06:42:03 -07:00
Adam Gastineau
b895cff05f Fix white screen sliding in when switching back to MABL 2025-10-22 14:51:06 -07:00
Adam Gastineau
fac9180438 Add static query tool for immediately opening Humane settings 2025-10-22 12:31:34 -07:00
Adam Gastineau
99e6d8d533 Move MABL initial loading state into PlatformUI 2025-10-22 12:26:48 -07:00
Adam Gastineau
322092df5b Update Moonlight to fix double button application 2025-10-22 09:39:45 -07:00
Adam Gastineau
4c2a5d78d7 Improve experience with opening Settings and back gesture 2025-10-22 09:39:19 -07:00
Adam Gastineau
6d22504d4b Fix back gesture in launched Settings app 2025-10-21 11:34:23 -07:00
Adam Gastineau
02416c9d1b Play start listening sound effect when no on disk asset is provided 2025-10-20 07:36:32 -07:00
Adam Gastineau
23b64afd4c Update Moonlight to latest, fixing cursor snap issues 2025-10-19 08:51:24 -07:00
Adam Gastineau
f0df1f6361 Remap static queries to use examples array 2025-10-18 07:08:55 -07:00
Adam Gastineau
e5f3d74211 Local implementations for time and volume tools 2025-10-18 07:03:18 -07:00
47 changed files with 1179 additions and 618 deletions

View File

@ -89,7 +89,6 @@ jobs:
# Rename plugin APKs
cp plugins/demo/build/outputs/apk/release/demo-release.apk "release-artifacts/PenumbraOS-Plugin-Demo-${VERSION_NAME}.apk"
cp plugins/openai/build/outputs/apk/release/openai-release.apk "release-artifacts/PenumbraOS-Plugin-OpenAI-${VERSION_NAME}.apk"
cp plugins/aipinsystem/build/outputs/apk/release/aipinsystem-release.apk "release-artifacts/PenumbraOS-Plugin-AiPin-System-${VERSION_NAME}.apk"
cp plugins/system/build/outputs/apk/release/system-release.apk "release-artifacts/PenumbraOS-Plugin-Generic-System-${VERSION_NAME}.apk"
cp plugins/googlesearch/build/outputs/apk/release/googlesearch-release.apk "release-artifacts/PenumbraOS-Plugin-GoogleSearch-${VERSION_NAME}.apk"

View File

@ -58,7 +58,7 @@
<option name="ADVANCED_PROFILING_ENABLED" value="false" />
<option name="STARTUP_PROFILING_ENABLED" value="false" />
<option name="STARTUP_CPU_PROFILING_ENABLED" value="false" />
<option name="STARTUP_CPU_PROFILING_CONFIGURATION_NAME" value="Java/Kotlin Method Sample (legacy)" />
<option name="STARTUP_CPU_PROFILING_CONFIGURATION_NAME" value="System Trace" />
<option name="STARTUP_NATIVE_MEMORY_PROFILING_ENABLED" value="false" />
<option name="NATIVE_MEMORY_SAMPLE_RATE_BYTES" value="2048" />
</Profilers>
@ -69,7 +69,6 @@
<option name="SKIP_ACTIVITY_VALIDATION" value="false" />
<method v="2">
<option name="Gradle.BeforeRunTask" enabled="false" tasks="installDemoPlugins" externalProjectPath="$PROJECT_DIR$" vmOptions="" scriptParameters="" />
<option name="Gradle.BeforeRunTask" enabled="false" tasks="installOpenAiPlugin" externalProjectPath="$PROJECT_DIR$" vmOptions="" scriptParameters="" />
<option name="Android.Gradle.BeforeRunTask" enabled="true" />
</method>
</configuration>

View File

@ -4,10 +4,9 @@ fi
if ! [ -f mabl/src/main/assets/minilm-l6-v2-tokenizer.json ]; then
curl -L -o mabl/src/main/assets/minilm-l6-v2-tokenizer.json https://huggingface.co/sentence-transformers/all-MiniLM-L6-v2/resolve/main/tokenizer.json?download=true
fi
./gradlew :plugins:demo:installDebug :plugins:aipinsystem:installDebug :plugins:system:installDebug :plugins:openai:installDebug :plugins:googlesearch:installDebug :mabl:installAipinDebug
./gradlew :plugins:demo:installDebug :plugins:aipinsystem:installDebug :plugins:system:installDebug :plugins:googlesearch:installDebug :mabl:installAipinDebug
adb shell pm grant com.penumbraos.mabl.pin android.permission.CAMERA
adb shell appops set com.penumbraos.mabl.pin MANAGE_EXTERNAL_STORAGE allow
adb shell appops set com.penumbraos.plugins.openai MANAGE_EXTERNAL_STORAGE allow
adb shell pm disable-user --user 0 humane.experience.systemnavigation
sleep 1
adb shell cmd package set-home-activity com.penumbraos.mabl.pin/com.penumbraos.mabl.MainActivity

View File

@ -12,11 +12,11 @@ composeBom = "2025.08.00"
appcompat = "1.7.1"
fragment = "1.8.9"
material = "1.12.0"
penumbraos-sdk = "783df84"
penumbraos-sdk = "e1a344b"
penumbraos-sdk-local = "0.1.0"
moonlight-ui = "220702a"
moonlight-ui = "6ae1d5e"
moonlight-ui-local = "0.1.0"
openai-client = "dedfa47"
langchain4j = "67188d8"
ktor-client = "3.0.0"
kotlinx-serialization = "1.7.1"
kotlinx-coroutines = "1.8.1"
@ -35,7 +35,9 @@ androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref =
junit = { group = "junit", name = "junit", version.ref = "junit" }
androidx-junit = { group = "androidx.test.ext", name = "junit", version.ref = "junitVersion" }
androidx-espresso-core = { group = "androidx.test.espresso", name = "espresso-core", version.ref = "espressoCore" }
androidx-lifecycle-runtime-compose = { group = "androidx.lifecycle", name = "lifecycle-runtime-compose", version.ref = "lifecycleCompose" }
androidx-lifecycle-runtime-ktx = { group = "androidx.lifecycle", name = "lifecycle-runtime-ktx", version.ref = "lifecycleCompose" }
androidx-lifecycle-process = { group = "androidx.lifecycle", name = "lifecycle-process", version.ref = "lifecycleCompose" }
androidx-lifecycle-viewmodel-compose = { group = "androidx.lifecycle", name = "lifecycle-viewmodel-compose", version.ref = "lifecycleCompose" }
androidx-activity-compose = { group = "androidx.activity", name = "activity-compose", version.ref = "activityCompose" }
androidx-compose-bom = { group = "androidx.compose", name = "compose-bom", version.ref = "composeBom" }
@ -58,7 +60,10 @@ penumbraos-sdk = { group = "com.github.PenumbraOS", name = "sdk", version.ref =
#penumbraos-sdk = { group = "com.penumbraos", name = "sdk", version.ref = "penumbraos-sdk-local" }
moonlight-ui = { group = "com.github.agg23", name = "moonlight", version.ref = "moonlight-ui" }
#moonlight-ui = { group = "com.open.pin", name = "ui", version.ref = "moonlight-ui-local" }
openai-client = { group = "com.github.agg23.openai-kotlin", name = "openai-client", version.ref = "openai-client" }
langchain4j = { group = "com.github.agg23.langchain4j", name = "langchain4j-bom", version.ref = "langchain4j" }
langchain4j-gemini = { group = "com.github.agg23.langchain4j", name = "langchain4j-google-ai-gemini", version.ref = "langchain4j" }
langchain4j-openai = { group = "com.github.agg23.langchain4j", name = "langchain4j-open-ai", version.ref = "langchain4j" }
langchain4j-kotlin = { group = "com.github.agg23.langchain4j", name = "langchain4j-kotlin", version.ref = "langchain4j" }
ktor-client-android = { group = "io.ktor", name = "ktor-client-android", version.ref = "ktor-client" }
ktor-content-negociation = { group = "io.ktor", name = "ktor-client-content-negotiation", version.ref = "ktor-client" }
ktor-serialization-kotlinx-json = { group = "io.ktor", name = "ktor-serialization-kotlinx-json", version.ref = "ktor-client" }

View File

@ -30,11 +30,11 @@ android {
}
}
compileOptions {
sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11
sourceCompatibility = JavaVersion.VERSION_17
targetCompatibility = JavaVersion.VERSION_17
}
kotlinOptions {
jvmTarget = "11"
jvmTarget = "17"
}
buildFeatures {
compose = true
@ -81,17 +81,25 @@ dependencies {
"aipinImplementation"(libs.moonlight.ui)
"aipinSimulatorImplementation"(libs.moonlight.ui)
implementation(libs.langchain4j.kotlin)
implementation(libs.langchain4j)
implementation(libs.langchain4j.openai)
implementation(libs.langchain4j.gemini)
implementation(libs.androidx.camera.core)
implementation(libs.androidx.camera.lifecycle)
implementation(libs.androidx.camera.camera2)
implementation(libs.ktor.client.android)
implementation(libs.kotlinx.serialization.json)
implementation(libs.onnx.runtime.android)
implementation(libs.sentence.embeddings)
implementation(libs.androidx.core.ktx)
implementation(libs.androidx.lifecycle.runtime.compose)
implementation(libs.androidx.lifecycle.runtime.ktx)
implementation(libs.androidx.lifecycle.process)
implementation(libs.androidx.lifecycle.viewmodel.compose)
implementation(libs.androidx.activity.compose)
implementation(platform(libs.androidx.compose.bom))

View File

@ -5,6 +5,6 @@ import androidx.compose.runtime.Composable
typealias ConversationRenderer = com.penumbraos.mabl.aipincore.ConversationRenderer
@Composable
fun PlatformUI(uiComponents: UIComponents) =
fun PlatformUI(uiComponents: UIComponents?) =
com.penumbraos.mabl.aipincore.PlatformUI(uiComponents)
typealias UIFactory = com.penumbraos.mabl.aipincore.UIFactory

View File

@ -25,7 +25,7 @@ import com.penumbraos.mabl.ui.UIComponents
@Composable
fun SimulatedPinDisplay(
modifier: Modifier = Modifier,
uiComponents: UIComponents
uiComponents: UIComponents?
) {
val density = LocalDensity.current

View File

@ -20,7 +20,7 @@ import com.penumbraos.mabl.simulation.SimulatedPinDisplay
import com.penumbraos.mabl.simulation.SimulatedTouchpad
@Composable
fun PlatformUI(uiComponents: UIComponents) {
fun PlatformUI(uiComponents: UIComponents?) {
// AI Pin Simulator: Three-panel layout for development and testing
Row(
modifier = Modifier.fillMaxSize()

View File

@ -1,13 +1,10 @@
package com.penumbraos.mabl.aipincore
import android.annotation.SuppressLint
import android.content.Context
import android.media.MediaPlayer
import android.util.Log
import com.penumbraos.mabl.services.AllControllers
import com.penumbraos.mabl.types.Error
import com.penumbraos.mabl.ui.interfaces.IConversationRenderer
import java.io.File
private const val TAG = "AiPinConversationRenderer"
@ -16,24 +13,9 @@ class ConversationRenderer(
private val controllers: AllControllers,
private val statusBroadcaster: SettingsStatusBroadcaster? = null
) : IConversationRenderer {
private val listeningMediaPlayer = MediaPlayer()
@SuppressLint("SdCardPath")
private val listeningSoundEffectFile = File("/sdcard/penumbra/mabl/sounds/listening.mp3")
// val penumbraClient = PenumbraClient(context)
init {
try {
if (listeningSoundEffectFile.exists()) {
listeningMediaPlayer.setDataSource(listeningSoundEffectFile.absolutePath)
listeningMediaPlayer.prepareAsync()
}
} catch (e: Exception) {
Log.e(TAG, "Failed to load listening sound effect", e)
}
}
// init {
// CoroutineScope(Dispatchers.Default).launch {
// penumbraClient.waitForBridge()
@ -65,8 +47,8 @@ class ConversationRenderer(
override fun showListening(isListening: Boolean) {
Log.d(TAG, "Listening: $isListening")
if (isListening && listeningSoundEffectFile.exists()) {
listeningMediaPlayer.start()
if (isListening) {
controllers.soundEffectManager.playStartListeningEffect()
}
}

View File

@ -65,15 +65,28 @@ open class PlatformInputHandler(
object : ITouchpadGestureDelegate {
override fun onGesture(gesture: TouchpadGesture) {
// TODO: Build proper API for Input Handler to perform standardized triggers
if (gesture.kind != TouchpadGestureKind.HOLD_END) {
// Any gesture that isn't a release should halt talking
interactionFlowManager.finishListening()
if (gesture.kind != TouchpadGestureKind.HOLD_END &&
gesture.kind != TouchpadGestureKind.FINGER_DOWN &&
gesture.kind != TouchpadGestureKind.GESTURE_CANCEL) {
// Any gesture that isn't a release (or intermediate finger down/cancel) should halt talking
interactionFlowManager.cancelTalking()
}
when (gesture.kind) {
TouchpadGestureKind.FINGER_DOWN -> {
// Immediately start listening, even if we abort later
interactionFlowManager.startListening()
}
TouchpadGestureKind.GESTURE_CANCEL -> {
interactionFlowManager.finishListening(abort = true)
}
TouchpadGestureKind.DOUBLE_TAP -> {
// TODO: Fix double tap with two fingers
// if (gesture.fingerCount == 2) {
// Cancel listening if it is ongoing
interactionFlowManager.finishListening(abort = true)
interactionFlowManager.takePicture()
// }
}

View File

@ -7,22 +7,26 @@ import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.lazy.items
import androidx.compose.material3.CircularProgressIndicator
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.MutableState
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.tooling.preview.Preview
import androidx.compose.ui.unit.dp
import androidx.compose.ui.unit.sp
import androidx.compose.ui.viewinterop.AndroidView
import androidx.lifecycle.ProcessLifecycleOwner
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
import androidx.lifecycle.compose.LifecycleStartEffect
import androidx.lifecycle.viewmodel.compose.viewModel
import com.open.pin.ui.PinTheme
import com.open.pin.ui.components.text.PinText
@ -39,57 +43,15 @@ import com.penumbraos.mabl.aipincore.view.model.ConversationDisplayNav
import com.penumbraos.mabl.aipincore.view.model.NavViewModel
import com.penumbraos.mabl.aipincore.view.model.PlatformViewModel
import com.penumbraos.mabl.aipincore.view.nav.Navigation
import com.penumbraos.mabl.data.AppDatabase
import com.penumbraos.mabl.data.types.ConversationMessage
import com.penumbraos.mabl.ui.UIComponents
@Composable
fun PlatformUI(uiComponents: UIComponents) {
val context = LocalContext.current
val database = remember { AppDatabase.getDatabase(context) }
fun PlatformUI(uiComponents: UIComponents?) {
val snapCoordinator = remember { mutableStateOf(SnapCoordinator()) }
val actualViewModel = uiComponents.platformCapabilities.getViewModel() as PlatformViewModel
var displayDebugView = remember { mutableStateOf(false) }
// Push view model into owner
viewModel<PlatformViewModel>(factory = object : ViewModelProvider.Factory {
override fun <T : ViewModel> create(modelClass: Class<T>): T {
@Suppress("UNCHECKED_CAST")
return actualViewModel as T
}
})
viewModel<NavViewModel>(factory = object : ViewModelProvider.Factory {
override fun <T : ViewModel> create(modelClass: Class<T>): T {
@Suppress("UNCHECKED_CAST")
return actualViewModel.navViewModel as T
}
})
val backDispatcher = LocalOnBackPressedDispatcherOwner.current
LaunchedEffect(Unit) {
actualViewModel.backGestureEvent.collect {
backDispatcher?.onBackPressedDispatcher?.onBackPressed()
}
}
LaunchedEffect(Unit) {
actualViewModel.openCurrentConversationEvent.collect {
val currentConversation =
actualViewModel.conversationRepository.getLastActiveConversation()
if (currentConversation != null) {
actualViewModel.navViewModel.pushView(ConversationDisplayNav(currentConversation.id))
}
}
}
LaunchedEffect(Unit) {
actualViewModel.debugChannel.collect {
displayDebugView.value = it
}
}
PinTheme {
ProvideSnapCoordinator(coordinator = snapCoordinator.value) {
Box(
@ -98,7 +60,18 @@ fun PlatformUI(uiComponents: UIComponents) {
.background(color = PinTheme.colors.background)
) {
// For some very strange reason things on the bottom are higher z-index
Navigation()
if (uiComponents != null) {
Content(uiComponents, displayDebugView)
} else {
Box(
modifier = Modifier
.fillMaxSize()
.background(color = PinTheme.colors.background),
contentAlignment = androidx.compose.ui.Alignment.Center
) {
CircularProgressIndicator()
}
}
if (displayDebugView.value) {
VoronoiVisualizer(
alpha = 0.4f
@ -113,6 +86,62 @@ fun PlatformUI(uiComponents: UIComponents) {
}
}
@Composable
fun Content(uiComponents: UIComponents, displayDebugView: MutableState<Boolean>) {
val platformViewModel = uiComponents.platformCapabilities.getViewModel() as PlatformViewModel
// Push view model into owner
viewModel<PlatformViewModel>(factory = object : ViewModelProvider.Factory {
override fun <T : ViewModel> create(modelClass: Class<T>): T {
@Suppress("UNCHECKED_CAST")
return platformViewModel as T
}
})
viewModel<NavViewModel>(factory = object : ViewModelProvider.Factory {
override fun <T : ViewModel> create(modelClass: Class<T>): T {
@Suppress("UNCHECKED_CAST")
return platformViewModel.navViewModel as T
}
})
val processLifecycle = remember { ProcessLifecycleOwner.get().lifecycle }
LifecycleStartEffect(processLifecycle) {
platformViewModel.appIsForeground = true
onStopOrDispose {
platformViewModel.appIsForeground = false
}
}
val backDispatcher = LocalOnBackPressedDispatcherOwner.current
LaunchedEffect(Unit) {
platformViewModel.backGestureEvent.collect {
if (!platformViewModel.navViewModel.isHomeScreen.value) {
backDispatcher?.onBackPressedDispatcher?.onBackPressed()
}
}
}
LaunchedEffect(Unit) {
platformViewModel.openCurrentConversationEvent.collect {
val currentConversation =
platformViewModel.conversationRepository.getLastActiveConversation()
if (currentConversation != null) {
platformViewModel.navViewModel.pushView(ConversationDisplayNav(currentConversation.id))
}
}
}
LaunchedEffect(Unit) {
platformViewModel.debugChannel.collect {
displayDebugView.value = it
}
}
Navigation()
}
@Composable
fun ConversationList(
modifier: Modifier = Modifier,
@ -138,12 +167,10 @@ fun ConversationList(
showScrollButtons = !menuOpen,
autoHideButtons = true
) {
Column {
for (message in messages) {
MessageItem(
message = message,
)
}
items(messages, key = { it.id }) { message ->
MessageItem(
message = message,
)
}
}
}

View File

@ -7,6 +7,8 @@ interface ITouchpadGestureDelegate {
data class TouchpadGesture(val kind: TouchpadGestureKind, val duration: Long, val fingerCount: Int)
enum class TouchpadGestureKind {
FINGER_DOWN,
GESTURE_CANCEL,
SINGLE_TAP,
DOUBLE_TAP,
HOLD_START,

View File

@ -73,6 +73,14 @@ class TouchpadGestureManager(
MotionEvent.ACTION_DOWN -> {
activePointers.add(event.getPointerId(0))
sendEventIfAllowed(event, updateLastEventTime = false) {
TouchpadGesture(
TouchpadGestureKind.FINGER_DOWN,
0,
activePointers.size
)
}
if (activePointers.size == 1) {
holdStartTime = event.eventTime
singleFingerHoldHandler = Handler(Looper.getMainLooper())
@ -91,14 +99,26 @@ class TouchpadGestureManager(
activePointers.remove(event.getPointerId(0))
// Cancel any pending single finger hold
val wasPendingHold = singleFingerHoldHandler != null
singleFingerHoldHandler?.removeCallbacksAndMessages(null)
singleFingerHoldHandler = null
val duration = event.eventTime - holdStartTime
// Handle hold end
if (isHolding) {
val duration = event.eventTime - holdStartTime
delegate.onGesture(TouchpadGesture(TouchpadGestureKind.HOLD_END, duration, 1))
isHolding = false
} else if (wasPendingHold && activePointers.isEmpty()) {
// Finger was lifted before any gesture started
// Only send if we didn't just send a recognized gesture
sendEventIfAllowed(event, updateLastEventTime = false) {
TouchpadGesture(
TouchpadGestureKind.GESTURE_CANCEL,
duration,
1
)
}
}
}
// A non-primary touch has changed
@ -149,12 +169,18 @@ class TouchpadGestureManager(
/**
* Send TouchpadGesture if allowed based on time since last event. Specifically to prevent sending gesture start events too close together
*/
private fun sendEventIfAllowed(event: MotionEvent, lambda: () -> TouchpadGesture) {
private fun sendEventIfAllowed(
event: MotionEvent,
updateLastEventTime: Boolean = true,
lambda: () -> TouchpadGesture,
) {
if (event.eventTime < lastEventTime + MIN_GESTURE_SEPARATION_MS) {
return
}
lastEventTime = event.eventTime
if (updateLastEventTime) {
lastEventTime = event.eventTime
}
delegate.onGesture(lambda())
}
@ -163,8 +189,15 @@ class TouchpadGestureManager(
if (isHolding) {
delegate.onGesture(TouchpadGesture(TouchpadGestureKind.HOLD_END, duration, 2))
isHolding = false
} else if (duration < 200) {
delegate.onGesture(TouchpadGesture(TouchpadGestureKind.SINGLE_TAP, duration, 2))
} else {
// Finger was lifted before any gesture completed
// Only send if we didn't just send a recognized gesture
sendEventIfAllowed(event, updateLastEventTime = false) {
TouchpadGesture(TouchpadGestureKind.GESTURE_CANCEL, duration, 2)
}
}
}
}

View File

@ -3,23 +3,20 @@ package com.penumbraos.mabl.aipincore.view.model
import androidx.lifecycle.ViewModel
import com.penumbraos.mabl.data.types.Conversation
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.map
class ConversationsViewModel(private val viewModel: PlatformViewModel) : ViewModel() {
val conversationsWithInjectedTitle: Flow<List<Conversation>> = flow {
viewModel.database.conversationDao().getAllConversations().collect {
emit(it.map {
val firstMessage = viewModel.database.conversationDao().getFirstUserMessage(it.id)
Conversation(
id = it.id,
title = firstMessage ?: it.title,
createdAt = it.createdAt,
lastActivity = it.lastActivity,
isActive = it.isActive
)
})
}
}
val conversationsWithInjectedTitle: Flow<List<Conversation>> =
viewModel.database.conversationDao()
.getConversationsWithFirstUserMessage()
.map { conversations ->
conversations.map { conversationWithFirstMessage ->
val conversation = conversationWithFirstMessage.conversation
conversation.copy(
title = conversationWithFirstMessage.firstUserMessage ?: conversation.title
)
}
}
fun openConversation(id: String) {
viewModel.navViewModel.pushView(ConversationDisplayNav(id))

View File

@ -14,6 +14,10 @@ data object DummyNav
class NavViewModel() : ViewModel() {
val backStack = mutableStateListOf<Any>(HomeNav)
val isHomeScreen = derivedStateOf {
backStack.lastOrNull() == HomeNav
}
val isMenuOpen = derivedStateOf {
backStack.lastOrNull() == MenuNav
}

View File

@ -22,6 +22,8 @@ class PlatformViewModel(
) : ViewModel() {
val navViewModel = NavViewModel()
var appIsForeground: Boolean = false
private val _backGestureChannel = Channel<Unit>(Channel.RENDEZVOUS)
val backGestureEvent = _backGestureChannel.receiveAsFlow()
@ -59,11 +61,19 @@ class PlatformViewModel(
}
fun backGesture() {
if (!appIsForeground) {
return
}
Log.d("PlatformViewModel", "Back gesture received")
_backGestureChannel.trySend(Unit)
}
fun toggleMenuVisible() {
if (!appIsForeground) {
return
}
if (!closeMenu()) {
Log.d("PlatformViewModel", "Showing menu")
navViewModel.backStack.add(MenuNav)

View File

@ -7,6 +7,7 @@ import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.lazy.items
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
@ -94,7 +95,7 @@ fun AllConversationsList(
showScrollButtons = !menuOpen,
autoHideButtons = true
) {
for (conversation in conversations) {
items(conversations, key = { it.id }) { conversation ->
ConversationTitleCard(
conversation = conversation,
)

View File

@ -1,5 +1,6 @@
import android.content.ComponentName
import android.content.Intent
import android.util.Log
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.ui.platform.LocalContext
@ -11,13 +12,20 @@ fun Settings(navViewModel: NavViewModel = viewModel<NavViewModel>()) {
val context = LocalContext.current
LaunchedEffect(Unit) {
navViewModel.popView()
val intent = Intent().apply {
component = ComponentName(
"humane.experience.settings",
"humane.experience.settings.SettingsExperience"
)
addFlags(Intent.FLAG_ACTIVITY_NEW_TASK or Intent.FLAG_ACTIVITY_CLEAR_TASK)
}
try {
context.startActivity(intent)
} catch (e: Exception) {
Log.e("Settings", "Failed to start settings", e)
}
context.startActivity(intent)
navViewModel.popView()
}
}

View File

@ -37,7 +37,7 @@ import com.penumbraos.mabl.discovery.PluginManager
import com.penumbraos.mabl.discovery.PluginService
@Composable
fun PlatformUI(uiComponents: UIComponents) {
fun PlatformUI(uiComponents: UIComponents?) {
val conversationRenderer = uiComponents.conversationRenderer as ConversationRenderer
// when (navigationController.currentScreen.value) {

View File

@ -39,7 +39,9 @@
android:name=".MainActivity"
android:exported="true"
android:theme="@style/Theme.MABL"
android:launchMode="singleInstance">
android:launchMode="singleTask"
android:excludeFromRecents="true"
android:alwaysRetainTaskState="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
@ -48,12 +50,28 @@
<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
</activity>
<service
android:name=".services.CameraService"
android:enabled="true"
android:exported="false"
android:foregroundServiceType="camera" />
<service
android:name=".plugins.llm.LangchainLlmService"
android:exported="true"
tools:ignore="ExportedService">
<intent-filter>
<action android:name="com.penumbraos.mabl.sdk.action.LLM_SERVICE" />
</intent-filter>
<meta-data
android:name="com.penumbraos.mabl.sdk.metadata.DISPLAY_NAME"
android:value="Langchain LLM Service" />
<meta-data
android:name="com.penumbraos.mabl.sdk.metadata.DESCRIPTION"
android:value="Langchain language model service supporting multiple providers" />
</service>
</application>
</manifest>

View File

@ -5,11 +5,8 @@ import android.util.Log
import androidx.activity.ComponentActivity
import androidx.activity.compose.setContent
import androidx.activity.enableEdgeToEdge
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.material3.CircularProgressIndicator
import androidx.compose.material3.Surface
import androidx.compose.runtime.Composable
import androidx.compose.runtime.mutableStateOf
import androidx.compose.ui.Modifier
import androidx.lifecycle.lifecycleScope
@ -117,6 +114,8 @@ class MainActivity : ComponentActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
Log.d("MainActivity", "MainActivity created")
lifecycleScope.launch {
controllers = AllControllers(lifecycleScope, this@MainActivity)
controllers.initialize()
@ -149,29 +148,15 @@ class MainActivity : ComponentActivity() {
setContent {
MABLTheme {
Surface(modifier = Modifier.fillMaxSize()) {
Content()
PlatformUI(uiComponentsState.value)
}
}
}
}
@Composable
fun Content() {
if (uiComponentsState.value != null) {
PlatformUI(uiComponentsState.value!!)
} else {
// Show loading state while services are connecting
Box(
modifier = Modifier.fillMaxSize(),
contentAlignment = androidx.compose.ui.Alignment.Center
) {
CircularProgressIndicator()
}
}
}
override fun onDestroy() {
super.onDestroy()
Log.d("MainActivity", "MainActivity destroyed")
controllers.shutdown(this)
}

View File

@ -238,6 +238,7 @@ class ConversationManager(
})
}
} else {
Log.d(TAG, "LLM requested 0 tool calls: ${response.text}")
// No tool calls, this is the final response
persistAssistantMessage(responseText, emptyArray())
@ -296,19 +297,7 @@ class ConversationManager(
}
override fun onCompleteResponse(response: LlmResponse) {
// This should be the final response after tool execution
val message = BinderConversationMessage().apply {
type = "assistant"
content = response.text ?: ""
toolCalls = emptyArray()
toolCallId = null
}
conversationHistory.add(message)
// Persist final assistant response to database
persistMessageSync("assistant", response.text ?: "")
callback.onCompleteResponse(response.text ?: "")
handleLlmResponse(response, filteredTools, callback)
}
override fun onError(error: String) {

View File

@ -56,8 +56,7 @@ class StaticQueryManager(
val tools = toolService.getToolDefinitions()
val map = mutableMapOf<String, ToolDefinition>()
for (tool in tools) {
val staticQueries = tool.description.split(",")
for (staticQuery in staticQueries) {
for (staticQuery in tool.examples ?: emptyArray()) {
map[staticQuery] = tool
}
}

View File

@ -1,6 +1,9 @@
package com.penumbraos.mabl.conversation
import android.content.ComponentName
import android.content.Context
import android.content.Intent
import android.util.Log
import com.penumbraos.mabl.sdk.IToolCallback
import com.penumbraos.mabl.sdk.ToolCall
import com.penumbraos.mabl.sdk.ToolDefinition
@ -12,11 +15,12 @@ import kotlinx.coroutines.launch
import org.json.JSONObject
private const val NEW_CONVERSATION = "new_conversation"
private const val OPEN_SETTINGS = "open_settings"
private const val REBOOT_NOW = "reboot_now"
class StaticQueryToolService(
private val allControllers: AllControllers,
context: Context,
private val context: Context,
val coroutineScope: CoroutineScope
) : ToolService("StaticQueryToolService") {
// TODO: This should work on non-Pin
@ -36,6 +40,27 @@ class StaticQueryToolService(
}
}
OPEN_SETTINGS -> {
coroutineScope.launch {
// TODO: This should work on non-Pin
val intent = Intent().apply {
component = ComponentName(
"humane.experience.settings",
"humane.experience.settings.SettingsExperience"
)
addFlags(Intent.FLAG_ACTIVITY_NEW_TASK or Intent.FLAG_ACTIVITY_CLEAR_TASK)
}
try {
context.startActivity(intent)
callback.onSuccess("Opened settings")
} catch (e: Exception) {
Log.e("Settings", "Failed to start settings", e)
callback.onSuccess("Failed to open settings")
}
}
}
REBOOT_NOW -> {
coroutineScope.launch {
try {
@ -58,19 +83,24 @@ class StaticQueryToolService(
return arrayOf(
ToolDefinition().apply {
name = NEW_CONVERSATION
description = "new conversation"
examples = arrayOf(
"start a new conversation",
"new chat",
"reset the conversation"
"new conversation",
"new chat"
)
},
ToolDefinition().apply {
name = OPEN_SETTINGS
examples = arrayOf(
"open settings",
"open system settings",
"open human settings",
"launch settings"
)
},
ToolDefinition().apply {
name = REBOOT_NOW
description = "reboot now,emergency reboot"
examples = arrayOf(
"reboot now",
"restart the pin",
"emergency reboot"
)
}

View File

@ -15,7 +15,7 @@ import com.penumbraos.mabl.data.types.ConversationMessage
@Database(
entities = [Conversation::class, ConversationMessage::class, ConversationImage::class],
version = 4,
version = 5,
exportSchema = false
)
abstract class AppDatabase : RoomDatabase() {
@ -64,6 +64,14 @@ abstract class AppDatabase : RoomDatabase() {
}
}
private val MIGRATION_4_5 = object : Migration(4, 5) {
override fun migrate(database: SupportSQLiteDatabase) {
database.execSQL("CREATE INDEX IF NOT EXISTS `index_conversation_messages_conversationId` ON `conversation_messages` (`conversationId`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_conversation_messages_conversationId_type_timestamp` ON `conversation_messages` (`conversationId`, `type`, `timestamp`)")
database.execSQL("CREATE INDEX IF NOT EXISTS `index_conversation_images_messageId` ON `conversation_images` (`messageId`)")
}
}
fun getDatabase(context: Context): AppDatabase {
return INSTANCE ?: synchronized(this) {
val instance = Room.databaseBuilder(
@ -71,7 +79,7 @@ abstract class AppDatabase : RoomDatabase() {
AppDatabase::class.java,
"app_database"
)
.addMigrations(MIGRATION_1_2, MIGRATION_2_3, MIGRATION_3_4)
.addMigrations(MIGRATION_1_2, MIGRATION_2_3, MIGRATION_3_4, MIGRATION_4_5)
.build()
INSTANCE = instance
instance

View File

@ -1,17 +1,59 @@
package com.penumbraos.mabl.data.dao
import androidx.room.Dao
import androidx.room.Embedded
import androidx.room.Insert
import androidx.room.Query
import androidx.room.Update
import com.penumbraos.mabl.data.types.Conversation
import kotlinx.coroutines.flow.Flow
// Row projection for the conversations list: a Conversation plus the content of
// its earliest user message (null when the conversation has no user messages).
data class ConversationWithFirstUserMessage(
@Embedded val conversation: Conversation,
val firstUserMessage: String?
)
@Dao
interface ConversationDao {
@Query("SELECT * FROM conversations ORDER BY lastActivity DESC LIMIT :limit")
fun getAllConversations(limit: Int = 50): Flow<List<Conversation>>
/**
 * Streams the most recent conversations, each paired with the text of its
 * earliest user message (null when none exists yet).
 *
 * The first message is chosen by ascending timestamp with id as a tie-breaker.
 * The previous query joined on MIN(timestamp) AND MIN(id) computed
 * independently; when the oldest message did not also carry the smallest id,
 * no row matched both and firstUserMessage silently came back NULL.
 */
@Query(
    """
    WITH first_user_messages AS (
        SELECT
            cm.conversationId,
            cm.content
        FROM conversation_messages cm
        WHERE
            cm.type = 'user' AND
            cm.id = (
                SELECT cm2.id
                FROM conversation_messages cm2
                WHERE cm2.conversationId = cm.conversationId AND cm2.type = 'user'
                ORDER BY cm2.timestamp ASC, cm2.id ASC
                LIMIT 1
            )
    )
    SELECT
        c.id AS id,
        c.title AS title,
        c.createdAt AS createdAt,
        c.lastActivity AS lastActivity,
        c.isActive AS isActive,
        fum.content AS firstUserMessage
    FROM conversations c
    LEFT JOIN first_user_messages fum ON fum.conversationId = c.id
    ORDER BY c.lastActivity DESC
    LIMIT :limit
    """
)
fun getConversationsWithFirstUserMessage(limit: Int = 50): Flow<List<ConversationWithFirstUserMessage>>
@Query("SELECT * FROM conversations WHERE id = :id")
suspend fun getConversation(id: String): Conversation?
@ -36,12 +78,4 @@ interface ConversationDao {
@Query("SELECT COUNT(*) FROM conversation_messages WHERE conversationId = :conversationId")
suspend fun getMessageCount(conversationId: String): Int
@Query(
"""
SELECT cm.content FROM conversation_messages cm
WHERE cm.conversationId = :conversationId AND cm.type = 'user'
ORDER BY cm.timestamp ASC LIMIT 1
"""
)
suspend fun getFirstUserMessage(conversationId: String): String?
}

View File

@ -2,12 +2,16 @@ package com.penumbraos.mabl.data.types
import androidx.room.Entity
import androidx.room.ForeignKey
import androidx.room.Index
import androidx.room.PrimaryKey
import kotlinx.serialization.Serializable
@Serializable
@Entity(
tableName = "conversation_images",
indices = [
Index(value = ["messageId"])
],
foreignKeys = [ForeignKey(
entity = ConversationMessage::class,
parentColumns = ["id"],

View File

@ -2,12 +2,17 @@ package com.penumbraos.mabl.data.types
import androidx.room.Entity
import androidx.room.ForeignKey
import androidx.room.Index
import androidx.room.PrimaryKey
import kotlinx.serialization.Serializable
@Serializable
@Entity(
tableName = "conversation_messages",
indices = [
Index(value = ["conversationId"]),
Index(value = ["conversationId", "type", "timestamp"])
],
foreignKeys = [ForeignKey(
entity = Conversation::class,
parentColumns = ["id"],

View File

@ -6,7 +6,8 @@ import com.penumbraos.mabl.types.Error
interface IInteractionFlowManager {
fun startListening(requestImage: Boolean = false)
fun startConversationFromInput(userInput: String)
fun finishListening()
fun finishListening(abort: Boolean = false)
fun cancelTalking()
fun isFlowActive(): Boolean
fun getCurrentFlowState(): InteractionFlowState

View File

@ -44,6 +44,8 @@ class InteractionFlowManager
private var stateCallback: InteractionStateCallback? = null
private var contentCallback: InteractionContentCallback? = null
private var didAbort: Boolean = false
private var cameraService: CameraService? = null
private var isCameraServiceBound = false
@ -64,17 +66,30 @@ class InteractionFlowManager
private val sttCallback = object : ISttCallback.Stub() {
override fun onPartialTranscription(partialText: String) {
if (didAbort) {
return
}
Log.d(TAG, "STT partial transcription: $partialText")
contentCallback?.onPartialTranscription(partialText)
}
override fun onFinalTranscription(finalText: String) {
Log.d(TAG, "STT final transcription: $finalText")
setState(InteractionFlowState.PROCESSING)
contentCallback?.onFinalTranscription(finalText)
if (didAbort) {
return
}
// Start conversation with the transcribed text
startConversationFromInput(finalText)
if (finalText.trim().isEmpty()) {
Log.d(TAG, "STT transcription was empty, skipping")
setState(InteractionFlowState.IDLE)
stateCallback?.onError(Error.SttError("Empty transcription"))
} else {
Log.d(TAG, "STT final transcription: $finalText")
setState(InteractionFlowState.PROCESSING)
contentCallback?.onFinalTranscription(finalText)
// Start conversation with the transcribed text
startConversationFromInput(finalText)
}
}
override fun onError(errorMessage: String) {
@ -95,16 +110,22 @@ class InteractionFlowManager
}
override fun startListening(requestImage: Boolean) {
if (currentState != InteractionFlowState.IDLE) {
currentModality =
if (requestImage) InteractionFlowModality.Vision else InteractionFlowModality.Speech
if (currentState == InteractionFlowState.LISTENING) {
Log.d(TAG, "Already listening. Continuing")
return
} else if (currentState != InteractionFlowState.IDLE) {
Log.w(TAG, "Cannot start listening, current state: $currentState")
return
}
didAbort = false
try {
allControllers.stt.startListening()
setState(InteractionFlowState.LISTENING)
currentModality =
if (requestImage) InteractionFlowModality.Vision else InteractionFlowModality.Speech
} catch (e: Exception) {
Log.e(TAG, "Failed to start listening: ${e.message}")
stateCallback?.onError(Error.SttError("Failed to start listening: ${e.message}"))
@ -150,12 +171,12 @@ class InteractionFlowManager
}
override fun onCompleteResponse(finalResponse: String) {
Log.d(TAG, "LLM complete response: $finalResponse")
allControllers.soundEffectManager.stopWaitingEffect()
if (currentState == InteractionFlowState.PROCESSING) {
// We never sent any partial responses. Make sure we send at least one
onPartialResponse(finalResponse)
}
Log.d(TAG, "LLM complete response: $finalResponse")
contentCallback?.onFinalResponse(finalResponse)
setState(InteractionFlowState.IDLE)
}
@ -170,11 +191,12 @@ class InteractionFlowManager
)
}
}
override fun finishListening() {
override fun finishListening(abort: Boolean) {
Log.d(TAG, "Stopping listening, state: $currentState")
setState(InteractionFlowState.CANCELLING)
didAbort = abort
allControllers.stt.cancelListening()
allControllers.tts.service?.stopSpeaking()
@ -182,6 +204,10 @@ class InteractionFlowManager
stateCallback?.onUserFinished()
}
override fun cancelTalking() {
allControllers.tts.service?.stopSpeaking()
}
override fun isFlowActive(): Boolean {
return currentState != InteractionFlowState.IDLE
}

View File

@ -0,0 +1,131 @@
package com.penumbraos.mabl.plugins.llm
import com.penumbraos.sdk.PenumbraClient
import com.penumbraos.sdk.http.ktor.HttpClientPlugin
import dev.langchain4j.exception.HttpException
import dev.langchain4j.http.client.HttpClient
import dev.langchain4j.http.client.HttpMethod
import dev.langchain4j.http.client.HttpRequest
import dev.langchain4j.http.client.SuccessfulHttpResponse
import dev.langchain4j.http.client.sse.ServerSentEventListener
import dev.langchain4j.http.client.sse.ServerSentEventParser
import io.ktor.client.call.body
import io.ktor.client.plugins.sse.SSE
import io.ktor.client.request.HttpRequestBuilder
import io.ktor.client.request.prepareRequest
import io.ktor.client.request.request
import io.ktor.client.request.setBody
import io.ktor.client.request.url
import io.ktor.client.statement.HttpResponse
import io.ktor.client.statement.bodyAsChannel
import io.ktor.content.TextContent
import io.ktor.http.ContentType
import io.ktor.http.isSuccess
import io.ktor.util.toMap
import io.ktor.utils.io.jvm.javaio.toInputStream
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.launch
import kotlinx.coroutines.runBlocking
/**
 * langchain4j [HttpClient] implementation backed by ktor, routing all traffic
 * through the Penumbra bridge via [HttpClientPlugin].
 *
 * Serves both one-shot requests ([execute]) and SSE streaming requests
 * ([execute] with a parser/listener pair) from the same underlying ktor client.
 */
class KtorHttpClient : HttpClient {
    private val coroutineScope: CoroutineScope
    private val ktorClient: io.ktor.client.HttpClient

    constructor(coroutineScope: CoroutineScope, penumbraClient: PenumbraClient) {
        this.coroutineScope = coroutineScope
        this.ktorClient = io.ktor.client.HttpClient {
            // Otherwise ktor strips ContentType
            useDefaultTransformers = false
            install(HttpClientPlugin) {
                this.penumbraClient = penumbraClient
            }
            install(SSE)
        }
    }

    /** Synchronous request; blocks the calling thread until the response body arrives. */
    override fun execute(request: HttpRequest): SuccessfulHttpResponse {
        return runBlocking {
            val response = ktorClient.request(request.url()) {
                buildRequest(this, request)
            }
            buildResponse(response, true)
        }
    }

    /**
     * Streaming request: opens the connection, reports errors / open via the
     * listener, then feeds the response byte stream into the SSE parser.
     */
    override fun execute(
        request: HttpRequest,
        parser: ServerSentEventParser,
        listener: ServerSentEventListener
    ) {
        coroutineScope.launch {
            ktorClient.prepareRequest {
                buildRequest(this, request)
            }.execute { response ->
                if (!response.status.isSuccess()) {
                    try {
                        listener.onError(HttpException(response.status.value, response.body()))
                    } catch (_: Exception) {
                        // Listener failures must not crash the coroutine
                    }
                    return@execute
                }

                try {
                    listener.onOpen(buildResponse(response, false))
                } catch (_: Exception) {
                    return@execute
                }

                try {
                    val stream = response.bodyAsChannel().toInputStream()
                    parser.parse(stream, listener)
                    listener.onClose()
                } catch (e: Exception) {
                    listener.onError(e)
                }
            }
        }
    }

    /** Translates a langchain4j [HttpRequest] onto a ktor request builder. */
    private fun buildRequest(builder: HttpRequestBuilder, langChainRequest: HttpRequest) {
        builder.url(langChainRequest.url())
        builder.method = when (langChainRequest.method()) {
            HttpMethod.GET -> io.ktor.http.HttpMethod.Get
            HttpMethod.POST -> io.ktor.http.HttpMethod.Post
            HttpMethod.DELETE -> io.ktor.http.HttpMethod.Delete
        }
        // Content-Type travels on the body object below; ktor treats it as an
        // engine-controlled header and rejects it when appended directly.
        for ((key, values) in langChainRequest.headers()) {
            if (key.equals("Content-Type", ignoreCase = true)) continue
            builder.headers.appendAll(key, values)
        }
        // BUG FIX: the lookup previously used the key "ContentType", which never
        // matches the standard "Content-Type" header, so the upstream value was
        // always discarded and JSON assumed. Match case-insensitively instead.
        val contentTypeString = langChainRequest.headers().entries
            .firstOrNull { it.key.equals("Content-Type", ignoreCase = true) }
            ?.value?.firstOrNull() ?: ""
        val contentType = try {
            ContentType.parse(contentTypeString)
        } catch (_: Exception) {
            // Absent or malformed header: fall back to JSON
            ContentType.Application.Json
        }
        // GET/DELETE requests may carry no body; TextContent requires non-null text
        langChainRequest.body()?.let { body ->
            builder.setBody(TextContent(body, contentType))
        }
    }

    /** Converts a ktor response into langchain4j's response type, optionally reading the body. */
    private suspend fun buildResponse(
        response: HttpResponse,
        withBody: Boolean
    ): SuccessfulHttpResponse {
        val builder = SuccessfulHttpResponse.builder()
            .statusCode(response.status.value)
            .headers(response.headers.toMap())
        if (withBody) {
            builder.body(response.body())
        }
        return builder.build()
    }
}

View File

@ -0,0 +1,36 @@
package com.penumbraos.mabl.plugins.llm
import com.penumbraos.sdk.PenumbraClient
import dev.langchain4j.http.client.HttpClient
import dev.langchain4j.http.client.HttpClientBuilder
import kotlinx.coroutines.CoroutineScope
import java.time.Duration
/**
 * Minimal [HttpClientBuilder] that hands langchain4j a pre-built
 * [KtorHttpClient]. Timeout configuration is not plumbed through to ktor yet
 * (TODO); getters report [Duration.ZERO] and setters are ignored.
 */
class KtorHttpClientBuilder(coroutineScope: CoroutineScope, penumbraClient: PenumbraClient) :
    HttpClientBuilder {
    val client = KtorHttpClient(coroutineScope, penumbraClient)

    // TODO: wire timeouts into the underlying ktor client
    override fun connectTimeout(): Duration? = Duration.ZERO

    // TODO: currently a no-op
    override fun connectTimeout(timeout: Duration?): HttpClientBuilder = this

    // TODO: wire timeouts into the underlying ktor client
    override fun readTimeout(): Duration? = Duration.ZERO

    // TODO: currently a no-op
    override fun readTimeout(timeout: Duration?): HttpClientBuilder = this

    override fun build(): HttpClient = client
}

View File

@ -0,0 +1,331 @@
@file:OptIn(ExperimentalEncodingApi::class)
package com.penumbraos.mabl.plugins.llm
import android.annotation.SuppressLint
import android.content.Intent
import android.os.IBinder
import android.system.Os
import android.system.OsConstants
import android.util.Log
import com.penumbraos.mabl.sdk.BinderConversationMessage
import com.penumbraos.mabl.sdk.ILlmCallback
import com.penumbraos.mabl.sdk.ILlmService
import com.penumbraos.mabl.sdk.LlmResponse
import com.penumbraos.mabl.sdk.MablService
import com.penumbraos.mabl.sdk.ToolCall
import com.penumbraos.mabl.sdk.ToolDefinition
import com.penumbraos.mabl.sdk.ToolParameter
import com.penumbraos.sdk.PenumbraClient
import dev.langchain4j.agent.tool.ToolExecutionRequest
import dev.langchain4j.agent.tool.ToolSpecification
import dev.langchain4j.data.message.AiMessage.aiMessage
import dev.langchain4j.data.message.ImageContent
import dev.langchain4j.data.message.SystemMessage.systemMessage
import dev.langchain4j.data.message.TextContent
import dev.langchain4j.data.message.ToolExecutionResultMessage.toolExecutionResultMessage
import dev.langchain4j.data.message.UserMessage.userMessage
import dev.langchain4j.kotlin.model.chat.StreamingChatModelReply
import dev.langchain4j.kotlin.model.chat.chatFlow
import dev.langchain4j.model.chat.StreamingChatModel
import dev.langchain4j.model.chat.request.ChatRequestParameters
import dev.langchain4j.model.chat.request.json.JsonObjectSchema
import dev.langchain4j.model.chat.response.ChatResponse
import dev.langchain4j.model.googleai.GoogleAiGeminiStreamingChatModel
import dev.langchain4j.model.openai.OpenAiStreamingChatModel
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.catch
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.launch
import java.io.ByteArrayOutputStream
import java.io.FileInputStream
import kotlin.io.encoding.Base64
import kotlin.io.encoding.ExperimentalEncodingApi
// Log tag for this service
private const val TAG = "LangchainLlmService"
// Default system prompt, used when the active configuration supplies none.
// The | margins are stripped with trimMargin() at the point of use.
private const val DEFAULT_PROMPT =
"""You are the MABL voice assistant. Your response will be spoken aloud to the user, so keep the response short and to the point.
|Your core responsibilities:
|1. Understand the user's request thoroughly.
|2. Identify which of the provided tools can best fulfill the request.
|3. Execute the tool(s) and provide a concise, accurate response based on the tool's output.
|4. If a tool is necessary to provide up-to-date or factual information (e.g., current news, real-time data), prioritize its use.
|5. Do NOT make up information. If a tool is required to get the answer, use it.
|6. If a query requires knowledge beyond your training data, especially for current events or news, the `web_search` tool is essential.
|7. Do not declare limitations (e.g., "I can only do X") if other relevant tools are available for the user's query. You have access to *all* provided tools.
|8. If no adequate tool is available, you are allowed to fall back on your own knowledge, but only when you have a high confidence of the answer."""
/**
 * LLM plugin service built on langchain4j streaming chat models.
 *
 * On startup the first configuration from [LlmConfigManager] selects either a
 * native Gemini client (with Google Search/Maps grounding enabled) or an
 * OpenAI-compatible client. All HTTP traffic is routed through the Penumbra
 * bridge via [KtorHttpClientBuilder].
 */
class LangchainLlmService : MablService("LangchainLlmService") {
    private val llmScope = CoroutineScope(Dispatchers.IO)

    // Streaming model; stays null if configuration loading or client creation fails
    private var model: StreamingChatModel? = null
    private val configManager = LlmConfigManager()
    private var currentConfig: LlmConfiguration? = null

    @SuppressLint("ForegroundServiceType")
    override fun onCreate() {
        super.onCreate()
        llmScope.launch {
            val client = PenumbraClient(this@LangchainLlmService)
            client.waitForBridge()

            try {
                // First config wins; throws when no config file exists or the list is empty
                currentConfig = configManager.getAvailableConfigs().first()
            } catch (e: Exception) {
                Log.e(TAG, "Failed to load LLM configuration", e)
            }

            val config = currentConfig
            if (config == null) {
                Log.e(TAG, "No valid LLM configuration found")
                return@launch
            }

            try {
                Log.d(TAG, "About to create Langchain client")
                model = when (config) {
                    is LlmConfiguration.Gemini -> {
                        GoogleAiGeminiStreamingChatModel.builder()
                            .allowGoogleSearch(true)
                            .allowGoogleMaps(true)
                            .httpClientBuilder(KtorHttpClientBuilder(llmScope, client))
                            .apiKey(config.apiKey)
                            .modelName(config.model)
                            .temperature(config.temperature)
                            .maxOutputTokens(config.maxTokens).build()
                    }

                    is LlmConfiguration.OpenAI -> {
                        OpenAiStreamingChatModel.builder()
                            .httpClientBuilder(KtorHttpClientBuilder(llmScope, client))
                            .baseUrl(config.baseUrl)
                            .apiKey(config.apiKey)
                            .modelName(config.model)
                            .temperature(config.temperature)
                            .maxTokens(config.maxTokens).build()
                    }
                }
                Log.w(
                    TAG,
                    "${config.type} client initialized successfully with model: ${config.model}"
                )
            } catch (e: Exception) {
                Log.e(TAG, "Failed to initialize Langchain client", e)
            }
        }
    }

    private val binder = object : ILlmService.Stub() {
        // TODO: Remove
        override fun setAvailableTools(tools: Array<ToolDefinition>) {
            Log.d(TAG, "Received ${tools.size} tool definitions")
        }

        /**
         * Streams a chat completion for the given conversation history.
         * Partial text is forwarded via onPartialResponse; the final text plus
         * any tool calls arrive via onCompleteResponse. Errors are reported via
         * onError (with two TODO-marked exceptions noted inline).
         */
        override fun generateResponse(
            messages: Array<BinderConversationMessage>,
            tools: Array<ToolDefinition>,
            callback: ILlmCallback
        ) {
            // lastOrNull: don't crash the log line on an empty history
            Log.w(
                TAG,
                "Submitting ${messages.size} conversation messages with ${tools.size} filtered tools. Last message: \"${messages.lastOrNull()?.content}\""
            )

            // Capture locally so the coroutine below cannot race a field update
            val activeModel = model
            if (activeModel == null) {
                Log.e(TAG, "LLM client not initialized")
                callback.onError("LLM client not initialized. Check API key configuration.")
                return
            }

            llmScope.launch {
                try {
                    val completions = activeModel.chatFlow {
                        this.messages += systemMessage(
                            currentConfig?.systemPrompt ?: DEFAULT_PROMPT.trimMargin()
                        )
                        this.messages += messages.map { message ->
                            when (message.type) {
                                "user" -> {
                                    if (message.imageFile != null) {
                                        val fileDescriptor = message.imageFile.fileDescriptor
                                        // Rewind file descriptor so we can reuse them
                                        // TODO: This somehow needs to live in MABL core
                                        Os.lseek(
                                            fileDescriptor,
                                            0,
                                            OsConstants.SEEK_SET
                                        )
                                        // use {} closes the stream (previously leaked);
                                        // readBytes() replaces the manual copy loop
                                        val imageBytes =
                                            FileInputStream(fileDescriptor).use { it.readBytes() }
                                        val imageUrl = Base64.Default.encode(imageBytes)
                                        userMessage(
                                            TextContent(message.content),
                                            ImageContent(
                                                imageUrl,
                                                "image/jpeg",
                                                ImageContent.DetailLevel.HIGH
                                            )
                                        )
                                    } else {
                                        userMessage(TextContent(message.content))
                                    }
                                }

                                "assistant" -> aiMessage(
                                    message.content,
                                    message.toolCalls.map { toolCall ->
                                        ToolExecutionRequest.builder().id(toolCall.id)
                                            .name(toolCall.name).arguments(toolCall.parameters)
                                            .build()
                                    }
                                )

                                // TODO: This tool name might be wrong/necessary
                                "tool" -> toolExecutionResultMessage(
                                    message.toolCallId,
                                    message.toolCallId,
                                    message.content
                                )

                                else -> userMessage(message.content)
                            }
                        }
                        this.parameters =
                            ChatRequestParameters.builder().toolSpecifications(
                                convertToolDefinitionsToAPI(tools)
                            ).build()
                    }

                    var finalResponse: ChatResponse? = null
                    completions
                        .catch { exception ->
                            Log.e(TAG, "Error making request", exception)
                            val content =
                                "LLM model error: ${exception.message?.removePrefix("Stream error: ")}"
                            // TODO: This should be onError
                            callback.onPartialResponse(content)
                        }
                        .onEach { chunk ->
                            when (chunk) {
                                is StreamingChatModelReply.CompleteResponse -> {
                                    finalResponse = chunk.response
                                }

                                is StreamingChatModelReply.PartialResponse -> {
                                    callback.onPartialResponse(chunk.partialResponse)
                                }

                                is StreamingChatModelReply.Error -> {
                                    throw chunk.cause
                                }
                            }
                        }
                        .collect()

                    // Copy to a local val: finalResponse is a captured var mutated
                    // inside a lambda, so it cannot be smart-cast directly
                    val completed = finalResponse
                    if (completed == null) {
                        // TODO: This should be onError
                        callback.onCompleteResponse(LlmResponse().apply {
                            text = "LLM model error: Empty response"
                        })
                        return@launch
                    }

                    // Send final response
                    val response = LlmResponse().apply {
                        text = completed.aiMessage().text() ?: ""
                        this.toolCalls =
                            completed.aiMessage().toolExecutionRequests().map { request ->
                                ToolCall().apply {
                                    id = request.id()
                                    name = request.name()
                                    parameters = request.arguments()
                                    isLLM = true
                                }
                            }.toTypedArray()
                    }

                    // Log the calls the model actually returned (previously this
                    // logged a never-populated local list, always printing nothing)
                    val flattenedCalls = response.toolCalls.joinToString {
                        "id: ${it.id}, name: ${it.name}, parameters: ${it.parameters}"
                    }
                    Log.w(
                        TAG,
                        "LLM response received: \"${response.text}\", $flattenedCalls"
                    )
                    callback.onCompleteResponse(response)
                } catch (e: Exception) {
                    Log.e(TAG, "Error generating response", e)
                    callback.onError("Error generating response: ${e.message}")
                }
            }
        }
    }

    /**
     * Maps MABL tool definitions onto langchain4j tool specifications.
     * Returns null for an empty array so no tool block is sent at all.
     */
    private fun convertToolDefinitionsToAPI(toolDefinitions: Array<ToolDefinition>): List<ToolSpecification>? {
        if (toolDefinitions.isEmpty()) {
            return null
        }
        return toolDefinitions.map { toolDef ->
            ToolSpecification.builder().name(toolDef.name).description(toolDef.description)
                .parameters(convertParametersToAPI(toolDef.parameters)).build()
        }
    }

    /**
     * Builds a JSON schema for a tool's parameters. Unrecognized parameter
     * types are silently skipped (matching previous behavior); required flags
     * are collected regardless of type.
     */
    private fun convertParametersToAPI(parameters: Array<ToolParameter>): JsonObjectSchema {
        val builder = JsonObjectSchema.builder()
        val required = mutableListOf<String>()
        for (parameter in parameters) {
            if (parameter.required) {
                required += parameter.name
            }
            when (parameter.type.lowercase()) {
                "string" -> builder.addStringProperty(parameter.name, parameter.description)
                "number", "float", "double", "int" -> builder.addNumberProperty(
                    parameter.name,
                    parameter.description
                )

                "enum" -> builder.addEnumProperty(
                    parameter.name,
                    parameter.enumValues.toList(),
                    parameter.description
                )
            }
        }
        return builder.required(required).build()
    }

    override fun onBind(intent: Intent?): IBinder = binder

    override fun onDestroy() {
        super.onDestroy()
        Log.d(TAG, "Langchain4j LLM service destroyed")
    }
}

View File

@ -0,0 +1,82 @@
package com.penumbraos.mabl.plugins.llm
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.JsonElement
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.JsonTransformingSerializer
import kotlinx.serialization.json.buildJsonObject
import kotlinx.serialization.json.put
private const val DEFAULT_MAX_TOKENS = 1000
private const val DEFAULT_TEMPERATURE = 0.7
// Provider-independent view of a single LLM configuration entry.
interface LlmConfig {
// Provider label (concrete subtypes default this to "Gemini" / "OpenAI")
val type: String
// Display name for this configuration
val name: String
// API key used to authenticate against the provider
val apiKey: String
// Model identifier passed to the provider
val model: String
// Upper bound on generated tokens (defaults to DEFAULT_MAX_TOKENS)
val maxTokens: Int
// Sampling temperature (defaults to DEFAULT_TEMPERATURE)
val temperature: Double
// Optional system prompt override; null means the service default is used
val systemPrompt: String?
}
// Discriminated union of supported LLM providers. kotlinx.serialization picks
// the concrete subtype from the "type" JSON field via @SerialName.
@Serializable
sealed class LlmConfiguration : LlmConfig {
// Google Gemini configuration; uses the native Gemini client (no base URL)
@Serializable
@SerialName("gemini")
data class Gemini(
override val type: String = "Gemini",
override val name: String,
override val apiKey: String,
override val model: String,
override val maxTokens: Int = DEFAULT_MAX_TOKENS,
override val temperature: Double = DEFAULT_TEMPERATURE,
override val systemPrompt: String? = null
) : LlmConfiguration()
// OpenAI-compatible configuration; baseUrl allows any compatible endpoint
@Serializable
@SerialName("openai")
data class OpenAI(
override val type: String = "OpenAI",
override val name: String,
override val apiKey: String,
override val model: String,
val baseUrl: String,
override val maxTokens: Int = DEFAULT_MAX_TOKENS,
override val temperature: Double = DEFAULT_TEMPERATURE,
override val systemPrompt: String? = null
) : LlmConfiguration()
}
/**
 * Backward compatibility for configs written before the "type" discriminator
 * existed: any JSON object without a "type" field deserializes as an OpenAI
 * configuration.
 */
object LlmConfigurationSerializer :
    JsonTransformingSerializer<LlmConfiguration>(LlmConfiguration.serializer()) {
    override fun transformDeserialize(element: JsonElement): JsonElement {
        // Pass through anything that is not an object or already has a discriminator
        if (element !is JsonObject || "type" in element) {
            return element
        }
        // Legacy entry: default the discriminator to "openai", keep all other fields
        return buildJsonObject {
            put("type", "openai")
            for ((key, value) in element) {
                put(key, value)
            }
        }
    }
}
// On-disk config file shape: { "configs": [ ... ] }
@Serializable
data class LlmConfigFile(
@Serializable(with = LlmConfigurationListSerializer::class)
val configs: List<LlmConfiguration>
)
// List wrapper that applies LlmConfigurationSerializer per element; the
// list-level transform itself is the identity (the element serializer does
// the legacy-type defaulting).
object LlmConfigurationListSerializer : JsonTransformingSerializer<List<LlmConfiguration>>(
kotlinx.serialization.builtins.ListSerializer(LlmConfigurationSerializer)
) {
override fun transformDeserialize(element: JsonElement): JsonElement = element
}

View File

@ -1,29 +1,12 @@
package com.penumbraos.plugins.openai
package com.penumbraos.mabl.plugins.llm
import android.annotation.SuppressLint
import android.util.Log
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import java.io.File
private const val TAG = "LlmConfigService"
@Serializable
data class LlmConfiguration(
val name: String,
val apiKey: String,
val model: String,
val maxTokens: Int = 1000,
val temperature: Double = 0.7,
val systemPrompt: String? = null,
val baseUrl: String
)
@Serializable
data class LlmConfigFile(
val configs: List<LlmConfiguration>
)
class LlmConfigManager {
private var configs: List<LlmConfiguration>? = null
@ -50,11 +33,16 @@ class LlmConfigManager {
val jsonString = configFile.readText()
val configFile = json.decodeFromString<LlmConfigFile>(jsonString)
val logMap = configFile.configs.map { config ->
val baseUrlInfo = if (config is LlmConfiguration.OpenAI) {
"Base URL: ${config.baseUrl}\n "
} else {
""
}
"""
Type: ${config.type}
Name: ${config.name}
Model: ${config.model}
Base URL: ${config.baseUrl}
Max Tokens: ${config.maxTokens}
${baseUrlInfo}Max Tokens: ${config.maxTokens}
Temperature: ${config.temperature}
""".trimIndent()
}

View File

@ -70,7 +70,7 @@ class AllControllers(coroutineScope: CoroutineScope, private val context: Contex
// In simulator mode, use more resilient connection approach
// Only connect to services that are known to work
try {
llm.connect(context, "com.penumbraos.plugins.openai")
llm.connect(context, "com.penumbraos.mabl.pinsim")
} catch (e: Exception) {
Log.w(TAG, "Failed to connect LLM service in simulator: $e")
}
@ -99,7 +99,7 @@ class AllControllers(coroutineScope: CoroutineScope, private val context: Contex
}
} else {
// Normal mode - connect to all external services
llm.connect(context, "com.penumbraos.plugins.openai")
llm.connect(context, "com.penumbraos.mabl.pin")
stt.connect(context, "com.penumbraos.plugins.demo")
tts.connect(context, "com.penumbraos.plugins.demo")
toolOrchestrator.connectAll()

View File

@ -1,8 +1,69 @@
package com.penumbraos.mabl.sound
import android.annotation.SuppressLint
import android.media.MediaPlayer
import android.util.Log
import java.io.File
private const val TAG = "SoundEffectManager"
class SoundEffectManager() {
private val tonePlayer = TonePlayer()
private val listeningMediaPlayer = MediaPlayer()
private var listeningMediaPlayerReady = false
@SuppressLint("SdCardPath")
private val listeningSoundEffectFile = File("/sdcard/penumbra/mabl/sounds/listening.mp3")
// Pre-load the optional listening sound effect from the on-device media path.
// Preparation is asynchronous: playback elsewhere is gated on
// listeningMediaPlayerReady, which only flips true once prepare completes.
init {
try {
if (listeningSoundEffectFile.exists()) {
listeningMediaPlayer.setDataSource(listeningSoundEffectFile.absolutePath)
listeningMediaPlayer.setOnPreparedListener {
Log.d(TAG, "Loaded listening sound effect")
listeningMediaPlayerReady = true
}
listeningMediaPlayer.setOnErrorListener { player, what, extra ->
// false: also let MediaPlayer's default error handling run
Log.e(TAG, "Error loading listening sound effect: $what, $extra")
false
}
listeningMediaPlayer.prepareAsync()
}
} catch (e: Exception) {
Log.e(TAG, "Failed to load listening sound effect", e)
}
}
/**
 * Plays the "start listening" cue: the mp3 asset when it loaded successfully,
 * otherwise a short synthesized G4 tone as fallback.
 */
fun playStartListeningEffect() {
    // Reset any in-flight audio first. stopStartListeningEffect() already
    // stops the tone player, so the separate tonePlayer.stop() call that
    // used to precede it was redundant and has been removed.
    stopStartListeningEffect()

    if (listeningSoundEffectFile.exists() && listeningMediaPlayerReady) {
        listeningMediaPlayer.start()
    } else {
        // Fallback: 200ms G4 (~392 Hz) with gentle attack/release edges
        val g4 = TonePlayer.SoundEvent(
            doubleArrayOf(391.995),
            200,
            attackDurationMs = 50,
            releaseDurationMs = 50
        )
        tonePlayer.playJingle(
            listOf(g4)
        )
    }
}
/**
 * Stops the "start listening" cue: silences the tone player and rewinds the
 * mp3 player so the next playback starts from the beginning.
 */
fun stopStartListeningEffect() {
    // TODO: This might cause clicking
    tonePlayer.stop()
    with(listeningMediaPlayer) {
        if (isPlaying) {
            pause()
            seekTo(0)
        }
    }
}
fun playWaitingEffect() {
tonePlayer.stop()

View File

@ -1,5 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<style name="Theme.MABL" parent="android:Theme.Material.Light.NoActionBar" />
<style name="Theme.MABL" parent="android:Theme.Material.Light.NoActionBar">
<item name="android:windowBackground">@color/black</item>
<item name="android:windowNoTitle">true</item>
<item name="android:windowContentOverlay">@null</item>
<item name="android:windowAnimationStyle">@null</item>
<item name="android:windowFullscreen">true</item>
<item name="android:windowActionBar">false</item>
<item name="android:windowSplashScreenAnimatedIcon">@android:color/transparent</item>
</style>
</resources>

View File

@ -15,6 +15,7 @@ import org.json.JSONArray
import org.json.JSONObject
import java.time.ZonedDateTime
import java.time.format.DateTimeFormatter
import java.time.format.FormatStyle
import java.util.Timer
import java.util.TimerTask
import java.util.concurrent.ConcurrentHashMap
@ -62,7 +63,7 @@ class TimeToolService : ToolService("TimeToolService") {
override fun executeTool(call: ToolCall, params: JSONObject?, callback: IToolCallback) {
when (call.name) {
GET_CURRENT_TIME_TOOL -> getCurrentTime(callback)
GET_CURRENT_TIME_TOOL -> getCurrentTime(callback, call.isLLM)
CREATE_TIMER_TOOL -> createTimer(params, callback)
LIST_TIMERS_TOOL -> listTimers(callback)
CANCEL_TIMER_TOOL -> cancelTimer(params, callback)
@ -78,6 +79,11 @@ class TimeToolService : ToolService("TimeToolService") {
ToolDefinition().apply {
name = GET_CURRENT_TIME_TOOL
description = "Get the current date and time"
examples = arrayOf(
"what time is it",
"current time",
"tell me the time"
)
parameters = emptyArray()
},
ToolDefinition().apply {
@ -150,19 +156,26 @@ class TimeToolService : ToolService("TimeToolService") {
)
}
private fun getCurrentTime(callback: IToolCallback) {
private fun getCurrentTime(callback: IToolCallback, isLLM: Boolean) {
val now = ZonedDateTime.now()
val isoFormat = now.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)
val timezone = now.zone.toString()
val result = """
if (isLLM) {
val isoFormat = now.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME)
val timezone = now.zone.toString()
val result = """
{
"datetime_iso": "$isoFormat",
"timezone": "$timezone"
}
""".trimIndent()
callback.onSuccess(result)
callback.onSuccess(result)
} else {
val time = DateTimeFormatter.ofLocalizedTime(FormatStyle.SHORT)
callback.onSuccess("It is ${now.format(time)}")
}
}
private fun createTimer(params: JSONObject?, callback: IToolCallback) {
@ -467,4 +480,4 @@ class TimeToolService : ToolService("TimeToolService") {
Log.e(TAG, "Failed to speak alert: ${e.message}")
}
}
}
}

View File

@ -39,8 +39,14 @@ class DemoSttService : MablService("DemoSttService") {
client.stt.initialize(object : SttRecognitionListener() {
override fun onError(error: Int) {
try {
currentCallback?.onError("Recognition error: $error")
} catch (e: RemoteException) {
// RecognitionError.ERROR_NO_MATCH
if (error == 7) {
Log.d("DemoSttService", "No speech recognized")
currentCallback?.onFinalTranscription("")
} else {
currentCallback?.onError("Recognition error: $error")
}
} catch (e: Exception) {
Log.e("DemoSttService", "Callback error", e)
}
}

View File

@ -1 +0,0 @@
/build

View File

@ -1,49 +0,0 @@
plugins {
alias(libs.plugins.android.application)
alias(libs.plugins.kotlin.android)
alias(libs.plugins.kotlin.serialization)
}
android {
namespace = "com.penumbraos.plugins.openai"
compileSdk = 35
defaultConfig {
applicationId = "com.penumbraos.plugins.openai"
minSdk = 32
targetSdk = 35
versionCode = (project.findProperty("versionCode") as String?)?.toIntOrNull() ?: 1
versionName = project.findProperty("versionName") as String? ?: "1.0"
}
buildTypes {
release {
isMinifyEnabled = false
signingConfig = signingConfigs.getByName("debug")
}
}
compileOptions {
sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11
}
kotlinOptions {
jvmTarget = "11"
}
}
dependencies {
implementation(project(":sdk"))
implementation(libs.penumbraos.sdk)
implementation(libs.openai.client)
implementation(libs.ktor.client.android)
implementation(libs.kotlinx.serialization.json)
implementation(libs.kotlinx.coroutines.android)
implementation(libs.androidx.core.ktx)
implementation(libs.androidx.appcompat)
implementation(libs.material)
}

View File

@ -1,30 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:tools="http://schemas.android.com/tools"
xmlns:android="http://schemas.android.com/apk/res/android"
package="com.penumbraos.plugins.openai">
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission
android:name="android.permission.MANAGE_EXTERNAL_STORAGE"
tools:ignore="ScopedStorage" />
<application tools:ignore="MissingApplicationIcon">
<service
android:name=".OpenAiLlmService"
android:exported="true"
tools:ignore="ExportedService">
<intent-filter>
<action android:name="com.penumbraos.mabl.sdk.action.LLM_SERVICE" />
</intent-filter>
<meta-data
android:name="com.penumbraos.mabl.sdk.metadata.DISPLAY_NAME"
android:value="OpenAI LLM Service" />
<meta-data
android:name="com.penumbraos.mabl.sdk.metadata.DESCRIPTION"
android:value="OpenAI compatible language model service" />
</service>
</application>
</manifest>

View File

@ -1,328 +0,0 @@
@file:OptIn(ExperimentalEncodingApi::class)
package com.penumbraos.plugins.openai
import android.annotation.SuppressLint
import android.content.Intent
import android.os.IBinder
import android.system.Os
import android.system.OsConstants
import android.util.Log
import com.aallam.openai.api.chat.*
import com.aallam.openai.api.core.Parameters
import com.aallam.openai.api.model.ModelId
import com.aallam.openai.client.OpenAI
import com.aallam.openai.client.OpenAIHost
import com.penumbraos.mabl.sdk.BinderConversationMessage
import com.penumbraos.mabl.sdk.DeviceUtils
import com.penumbraos.mabl.sdk.ILlmCallback
import com.penumbraos.mabl.sdk.ILlmService
import com.penumbraos.mabl.sdk.LlmResponse
import com.penumbraos.mabl.sdk.MablService
import com.penumbraos.mabl.sdk.ToolCall
import com.penumbraos.mabl.sdk.ToolDefinition
import com.penumbraos.mabl.sdk.ToolParameter
import com.penumbraos.sdk.PenumbraClient
import com.penumbraos.sdk.http.ktor.HttpClientPlugin
import io.ktor.client.*
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.launch
import kotlinx.serialization.Serializable
import kotlinx.serialization.json.Json
import java.io.ByteArrayOutputStream
import java.io.FileInputStream
import kotlin.io.encoding.Base64
import kotlin.io.encoding.ExperimentalEncodingApi
// Log tag shared by all logging in this service.
private const val TAG = "OpenAiLlmService"

/**
 * JSON-schema-style description of a tool's parameter object. Serialized with
 * kotlinx.serialization and handed to the OpenAI API as the function-call
 * parameter definition (see convertParametersToOpenAI).
 */
@Serializable
data class ParameterSchema(
    val type: String,
    val properties: Map<String, PropertySchema> = emptyMap(),
    val required: List<String> = emptyList()
)

/** Schema for a single named property within [ParameterSchema.properties]. */
@Serializable
data class PropertySchema(
    val type: String,
    val description: String,
    val enum: List<String>? = null
)
// Fallback system prompt used when the active LlmConfiguration supplies none.
// The `|` margins are stripped with trimMargin() at the point of use.
private const val DEFAULT_PROMPT =
    """You are the MABL voice assistant. Your response will be spoken aloud to the user, so keep the response short and to the point.
    |Your core responsibilities:
    |1. Understand the user's request thoroughly.
    |2. Identify which of the provided tools can best fulfill the request.
    |3. Execute the tool(s) and provide a concise, accurate response based on the tool's output.
    |4. If a tool is necessary to provide up-to-date or factual information (e.g., current news, real-time data), prioritize its use.
    |5. Do NOT make up information. If a tool is required to get the answer, use it.
    |6. If a query requires knowledge beyond your training data, especially for current events or news, the `web_search` tool is essential.
    |7. Do not declare limitations (e.g., "I can only do X") if other relevant tools are available for the user's query. You have access to *all* provided tools.
    |8. If no adequate tool is available, you are allowed to fall back on your own knowledge, but only when you have a high confidence of the answer."""
/**
 * LLM plugin service that proxies MABL conversations through an OpenAI-compatible
 * chat-completions endpoint (via the openai-kotlin client), streaming partial text
 * back over the [ILlmCallback] binder interface.
 */
class OpenAiLlmService : MablService("OpenAiLlmService") {
    // All LLM work is network-bound, so it runs on the IO dispatcher, off the
    // binder and main threads.
    private val llmScope = CoroutineScope(Dispatchers.IO)
    private var openAI: OpenAI? = null
    private val configManager = LlmConfigManager()
    private var currentConfig: LlmConfiguration? = null

    @SuppressLint("ForegroundServiceType")
    override fun onCreate() {
        super.onCreate()
        llmScope.launch {
            // On Ai Pin hardware, HTTP traffic is routed through the Penumbra
            // bridge; wait for it before constructing the HTTP client.
            var client: PenumbraClient? = null
            if (DeviceUtils.isAiPin()) {
                client = PenumbraClient(this@OpenAiLlmService)
                client.waitForBridge()
            }
            try {
                // Use the first available configuration; first() throws when none exist.
                currentConfig = configManager.getAvailableConfigs().first()
            } catch (e: Exception) {
                Log.e(TAG, "Failed to load LLM configuration", e)
            }
            if (currentConfig == null) {
                Log.e(TAG, "No valid LLM configuration found")
                return@launch
            }
            try {
                Log.d(TAG, "About to create OpenAI client")
                val apiKey = currentConfig!!.apiKey
                val baseUrl = currentConfig!!.baseUrl
                openAI =
                    OpenAI(
                        token = apiKey,
                        host = OpenAIHost(baseUrl),
                        httpClientConfig = {
                            if (DeviceUtils.isAiPin()) {
                                install(HttpClientPlugin) {
                                    // Should have been initialized at start
                                    penumbraClient = client!!
                                }
                            }
                        }
                    )
                Log.w(
                    TAG,
                    "OpenAI client initialized successfully with model: ${currentConfig!!.model}"
                )
            } catch (e: Exception) {
                Log.e(TAG, "Failed to initialize OpenAI client", e)
            }
        }
    }

    // Tools registered via setAvailableTools. generateResponse receives its own
    // (already filtered) tool array, so this cache is currently informational.
    private var availableTools: List<Tool>? = null

    private val binder = object : ILlmService.Stub() {
        override fun setAvailableTools(tools: Array<ToolDefinition>) {
            Log.d(TAG, "Received ${tools.size} tool definitions")
            availableTools = convertToolDefinitionsToOpenAI(tools)
        }

        /**
         * Streams a chat completion for [messages], forwarding text deltas through
         * onPartialResponse and delivering the final text plus any tool calls via
         * onCompleteResponse. Failures are reported through onError; this method
         * never throws across the binder.
         */
        override fun generateResponse(
            messages: Array<BinderConversationMessage>,
            tools: Array<ToolDefinition>,
            callback: ILlmCallback
        ) {
            Log.w(
                TAG,
                "Submitting ${messages.size} conversation messages with ${tools.size} filtered tools. Last message: \"${messages.last().content}\""
            )
            // openAI is initialized asynchronously in onCreate; fail fast if a
            // request arrives before (or after a failed) initialization.
            if (openAI == null) {
                Log.e(TAG, "OpenAI client not initialized")
                callback.onError("OpenAI client not initialized. Check API key configuration.")
                return
            }
            llmScope.launch {
                try {
                    // Convert binder conversation messages into openai-kotlin chat messages.
                    val conversationMessages = messages.map { message ->
                        when (message.type) {
                            "user" -> {
                                if (message.imageFile != null) {
                                    val fileDescriptor = message.imageFile.fileDescriptor
                                    // Rewind file descriptor so we can reuse them
                                    // TODO: This somehow needs to live in MABL core
                                    Os.lseek(
                                        fileDescriptor,
                                        0,
                                        OsConstants.SEEK_SET
                                    )
                                    // Fix: read with use {} so the stream is always closed.
                                    // The previous manual buffer loop leaked the
                                    // FileInputStream on every image message.
                                    val imageBytes =
                                        FileInputStream(fileDescriptor).use { it.readBytes() }
                                    val imageUrl = Base64.Default.encode(imageBytes)
                                    // NOTE(review): assumes the image payload is JPEG —
                                    // confirm against whatever produces imageFile.
                                    ChatMessage(
                                        role = ChatRole.User,
                                        content = listOf(
                                            TextPart(message.content),
                                            ImagePart(url = "data:image/jpeg;base64,$imageUrl")
                                        )
                                    )
                                } else {
                                    ChatMessage(
                                        role = ChatRole.User,
                                        content = message.content
                                    )
                                }
                            }

                            "assistant" -> ChatMessage(
                                role = ChatRole.Assistant,
                                content = message.content,
                                toolCalls = message.toolCalls?.map { toolCall ->
                                    function {
                                        id = ToolId(toolCall.id)
                                        function = FunctionCall(
                                            toolCall.name,
                                            toolCall.parameters
                                        )
                                    }
                                }
                            )

                            "tool" -> ChatMessage(
                                role = ChatRole.Tool,
                                content = message.content,
                                toolCallId = message.toolCallId?.let {
                                    ToolId(
                                        it
                                    )
                                }
                            )

                            // Unknown message types degrade to plain user messages.
                            else -> ChatMessage(
                                role = ChatRole.User,
                                content = message.content
                            )
                        }
                    }
                    // The system prompt always leads the conversation.
                    val chatMessages = listOf(
                        ChatMessage(
                            role = ChatRole.System,
                            content = currentConfig!!.systemPrompt
                                ?: DEFAULT_PROMPT.trimMargin()
                        )
                    ) + conversationMessages
                    val chatCompletionRequest = ChatCompletionRequest(
                        model = ModelId(currentConfig!!.model),
                        messages = chatMessages,
                        maxTokens = currentConfig!!.maxTokens,
                        temperature = currentConfig!!.temperature,
                        tools = convertToolDefinitionsToOpenAI(tools)
                    )
                    val responseBuilder = StringBuilder()
                    val toolCalls = mutableListOf<ToolCall>()
                    var messageCount = 0
                    val completions = openAI!!.chatCompletions(chatCompletionRequest)
                    completions.onEach { chunk: ChatCompletionChunk ->
                        Log.d(TAG, "Received chunk: $chunk")
                        messageCount += 1
                        chunk.choices.forEach { choice ->
                            choice.delta?.let { delta ->
                                delta.content?.let { content ->
                                    responseBuilder.append(content)
                                    callback.onPartialResponse(content)
                                }
                                delta.toolCalls?.forEach { toolCall ->
                                    // NOTE(review): this assumes each streamed chunk
                                    // carries a complete tool call (id + name +
                                    // arguments). Providers that fragment arguments
                                    // across chunks would NPE on id!! below — confirm
                                    // with the backends in use.
                                    if (toolCall.function != null) {
                                        val convertedToolCall = ToolCall().apply {
                                            id = toolCall.id!!.id
                                            name = toolCall.function!!.name
                                            parameters = toolCall.function!!.arguments
                                            isLLM = true
                                        }
                                        toolCalls.add(convertedToolCall)
                                    }
                                }
                            }
                        }
                    }.collect()
                    // Send final response
                    val response = LlmResponse().apply {
                        text = responseBuilder.toString()
                        this.toolCalls = toolCalls.toTypedArray()
                    }
                    val flattenedCalls = toolCalls.joinToString {
                        "id: ${it.id}, name: ${it.name}, parameters: ${it.parameters}"
                    }
                    Log.w(
                        TAG,
                        "LLM response received: \"${response.text}\", $flattenedCalls"
                    )
                    callback.onCompleteResponse(response)
                } catch (e: Exception) {
                    Log.e(TAG, "Error generating response", e)
                    callback.onError("Error generating response: ${e.message}")
                }
            }
        }
    }

    /**
     * Maps MABL tool definitions to openai-kotlin [Tool] values. Returns null for
     * an empty array because the API distinguishes "no tools" from an empty list.
     */
    private fun convertToolDefinitionsToOpenAI(toolDefinitions: Array<ToolDefinition>): List<Tool>? {
        if (toolDefinitions.isEmpty()) {
            return null
        }
        return toolDefinitions.map { toolDef ->
            Tool.function(
                name = toolDef.name,
                description = toolDef.description,
                parameters = convertParametersToOpenAI(toolDef.parameters)
            )
        }
    }

    /** Builds a JSON-schema "object" describing a tool's parameters. */
    private fun convertParametersToOpenAI(parameters: Array<ToolParameter>): Parameters {
        val properties = parameters.associate { param ->
            param.name to PropertySchema(
                type = param.type,
                description = param.description,
                enum = if (param.enumValues.isNotEmpty()) param.enumValues.toList() else null
            )
        }
        val required = parameters.filter { it.required }.map { it.name }
        val schema = ParameterSchema(
            type = "object",
            properties = properties,
            required = required
        )
        return Parameters.fromJsonString(Json.encodeToString(ParameterSchema.serializer(), schema))
    }

    override fun onBind(intent: Intent?): IBinder = binder

    override fun onDestroy() {
        super.onDestroy()
        Log.d(TAG, "OpenAI LLM Service destroyed")
    }
}

View File

@ -2,8 +2,23 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android">
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<application>
<service
android:name=".tool.NetworkService"
android:exported="true">
<intent-filter>
<action android:name="com.penumbraos.mabl.sdk.action.TOOL_SERVICE" />
</intent-filter>
<meta-data
android:name="com.penumbraos.mabl.sdk.metadata.DISPLAY_NAME"
android:value="Network Tool Service" />
<meta-data
android:name="com.penumbraos.mabl.sdk.metadata.DESCRIPTION"
android:value="Tool that provides access to device network information" />
</service>
<service
android:name=".tool.VolumeService"
android:exported="true">

View File

@ -0,0 +1,79 @@
package com.penumbraos.plugins.system.tool
import android.Manifest
import android.net.ConnectivityManager
import android.util.Log
import androidx.annotation.RequiresPermission
import com.penumbraos.mabl.sdk.IToolCallback
import com.penumbraos.mabl.sdk.ToolCall
import com.penumbraos.mabl.sdk.ToolDefinition
import com.penumbraos.mabl.sdk.ToolService
import org.json.JSONObject
// Tool identifier advertised to the MABL tool registry.
private const val GET_IP = "get_ip"

// Matches a dotted-quad IPv4 address with an optional CIDR suffix, e.g.
// "192.168.1.2/24". Capture group 1 is the bare address. Hoisted to a top-level
// val so the regex compiles once.
private val IPv4_REGEX = """(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(/\d{1,2})?""".toRegex()
/**
 * MABL tool service exposing device network information. Currently provides a
 * single tool, [GET_IP], which reports the device's IPv4 address on the active
 * network.
 */
class NetworkService : ToolService("NetworkService") {
    @RequiresPermission(Manifest.permission.ACCESS_NETWORK_STATE)
    override fun executeTool(
        call: ToolCall,
        params: JSONObject?,
        callback: IToolCallback
    ) {
        when (call.name) {
            GET_IP -> {
                val connectivityManager =
                    getSystemService(CONNECTIVITY_SERVICE) as ConnectivityManager?
                if (connectivityManager == null) {
                    callback.onError("Failed to get network status")
                    return
                }
                val linkProperties =
                    connectivityManager.getLinkProperties(connectivityManager.activeNetwork)
                if (linkProperties == null) {
                    callback.onError("Failed to get network status")
                    return
                }
                Log.d(
                    "NetworkService",
                    "Link properties: ${linkProperties.linkAddresses.map { it.toString() }}"
                )
                // Fix: scan ALL link addresses for the first IPv4 match. The previous
                // map { ... }.firstOrNull() returned the first *element* even when it
                // was null (e.g. an IPv6 address listed first), hiding a valid IPv4
                // address later in the list.
                val address = linkProperties.linkAddresses.firstNotNullOfOrNull {
                    IPv4_REGEX.matchEntire(it.toString())?.groups?.get(1)?.value
                }
                if (address == null) {
                    callback.onError("Could not identify IP address")
                    return
                }
                callback.onSuccess("My IP address is $address")
            }

            // Fix: always complete the callback. Previously an unrecognized tool
            // name fell through silently, leaving the caller waiting forever.
            else -> callback.onError("Unknown tool: ${call.name}")
        }
    }

    /** Declares the tools this service offers, with example trigger phrases. */
    override fun getToolDefinitions(): Array<ToolDefinition> {
        return arrayOf(
            ToolDefinition().apply {
                name = GET_IP
                description = "Get the IP address of the device"
                examples = arrayOf(
                    "what is your IP address",
                    "what is your address",
                    "IP address",
                    "internet address",
                    "what is the IP"
                )
            }
        )
    }
}

View File

@ -21,7 +21,7 @@ class VolumeService : ToolService("VolumeService") {
when (call.name) {
GET_VOLUME -> {
val volume = getVolume()
callback.onSuccess(volume.toString())
callback.onSuccess("Device volume is $volume%")
}
SET_VOLUME -> {
@ -37,22 +37,26 @@ class VolumeService : ToolService("VolumeService") {
MUTE_VOLUME -> {
setMute(true)
callback.onSuccess("Volume muted")
}
UNMUTE_VOLUME -> {
setMute(false)
callback.onSuccess("Volume unmuted")
}
INCREASE_VOLUME -> {
val currentVolume = getVolume()
val newVolume = (currentVolume + 10).coerceAtMost(100)
setVolume(newVolume)
callback.onSuccess("Volume increased to $newVolume%")
}
DECREASE_VOLUME -> {
val currentVolume = getVolume()
val newVolume = (currentVolume - 10).coerceAtLeast(0)
setVolume(newVolume)
callback.onSuccess("Volume decreased to $newVolume%")
}
}
}
@ -61,6 +65,11 @@ class VolumeService : ToolService("VolumeService") {
return arrayOf(ToolDefinition().apply {
name = GET_VOLUME
description = "Get the current volume level"
examples = arrayOf(
"volume",
"what's the volume",
"current volume level"
)
parameters = emptyArray()
}, ToolDefinition().apply {
name = SET_VOLUME
@ -72,28 +81,52 @@ class VolumeService : ToolService("VolumeService") {
required = true
enumValues = emptyArray()
})
examples = emptyArray()
}, ToolDefinition().apply {
name = MUTE_VOLUME
description = "Mute the volume"
examples = arrayOf(
"mute the volume",
"mute the device",
"silence audio",
"turn sound off"
)
parameters = emptyArray()
}, ToolDefinition().apply {
name = UNMUTE_VOLUME
description = "Unmute the volume"
examples = arrayOf(
"unmute the volume",
"unmute the device",
"turn sound on"
)
parameters = emptyArray()
}, ToolDefinition().apply {
name = INCREASE_VOLUME
description = "Increase the volume by 10%"
examples = arrayOf(
"increase the volume",
"turn up the volume",
"turn it up"
)
parameters = emptyArray()
}, ToolDefinition().apply {
name = DECREASE_VOLUME
description = "Decrease the volume by 10%"
examples = arrayOf(
"decrease the volume",
"turn down the volume",
"turn it down"
)
parameters = emptyArray()
})
}
fun getVolume(): Int {
val audioManager = getSystemService(AUDIO_SERVICE) as AudioManager
return audioManager.getStreamVolume(AudioManager.STREAM_MUSIC)
val maxVolume = audioManager.getStreamMaxVolume(AudioManager.STREAM_MUSIC)
val volume = audioManager.getStreamVolume(AudioManager.STREAM_MUSIC)
return volume * 100 / maxVolume
}
fun setVolume(volume: Int) {