11 Commits

Author SHA1 Message Date
genki
6ce115baa9 Bradeth_v1
UI improvement sweep
Underlying 'train models' backend functionality, dao and room db.
Mlmodule Hilt DI
2026-01-07 00:44:11 -05:00
genki
6734c343cc TrainScreen / FacePicker / Sanity Checking input training data (dupes, multi faces) 2026-01-02 02:20:57 -05:00
genki
22c25d5ced TODO - end of time - need to revisit analysis results window - broke it adding the facePicker (needs to go in AppRoutes) 2026-01-01 01:30:08 -05:00
genki
dba64b89b6 face detection + multi faces check
filtering before crop prompt - do we need to have user crop photos with only one face?
2026-01-01 01:02:42 -05:00
genki
3f15bfabc1 Cleaner - UI Almost and Room Photo Ingestion 2025-12-26 01:26:51 -05:00
genki
0f7f4a4201 Cleaner - Needs UI rebuild from Master TBD 2025-12-25 22:18:58 -05:00
genki
0d34a2510b Mess - Crash on boot - Backend ?? 2025-12-25 00:40:57 -05:00
genki
c458e08075 Correct schema
Meaningful queries
Proper transactional reads
2025-12-24 22:48:34 -05:00
genki
c10cbf373f Working Gallery and Repo - Earlydays! 2025-12-20 18:27:09 -05:00
genki
91f6327c31 CheckPoint save for adding 'Tour' screen, and PhotoData and PhotoViewModels 2025-12-20 18:27:09 -05:00
genki
52fa755a3f Working Gallery and Repo - Earlydays! 2025-12-20 17:57:01 -05:00
72 changed files with 7484 additions and 272 deletions

1
.idea/.name generated Normal file
View File

@@ -0,0 +1 @@
SherpAI2

13
.idea/deviceManager.xml generated Normal file
View File

@@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DeviceTable">
<option name="columnSorters">
<list>
<ColumnSorterState>
<option name="column" value="Name" />
<option name="order" value="ASCENDING" />
</ColumnSorterState>
</list>
</option>
</component>
</project>

1
.idea/gradle.xml generated
View File

@@ -1,5 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="GradleMigrationSettings" migrationVersion="1" />
<component name="GradleSettings"> <component name="GradleSettings">
<option name="linkedExternalProjectsSettings"> <option name="linkedExternalProjectsSettings">
<GradleProjectSettings> <GradleProjectSettings>

View File

@@ -0,0 +1,61 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="ComposePreviewDimensionRespectsLimit" enabled="true" level="WARNING" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="ComposePreviewMustBeTopLevelFunction" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="ComposePreviewNeedsComposableAnnotation" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="ComposePreviewNotSupportedInUnitTestFiles" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewDimensionRespectsLimit" enabled="true" level="WARNING" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewMustBeTopLevelFunction" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewNeedsComposableAnnotation" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewNotSupportedInUnitTestFiles" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewAnnotationInFunctionWithParameters" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewApiLevelMustBeValid" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewDeviceShouldUseNewSpec" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewFontScaleMustBeGreaterThanZero" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewMultipleParameterProviders" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewParameterProviderOnFirstParameter" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewPickerAnnotation" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
</profile>
</component>

1
.idea/misc.xml generated
View File

@@ -1,4 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" /> <component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="jbr-21" project-jdk-type="JavaSDK"> <component name="ProjectRootManager" version="2" languageLevel="JDK_21" default="true" project-jdk-name="jbr-21" project-jdk-type="JavaSDK">

View File

@@ -0,0 +1,4 @@
kotlin version: 2.0.21
error message: The daemon has terminated unexpectedly on startup attempt #1 with error code: 0. The daemon process output:
1. Kotlin compile daemon is ready

View File

@@ -1,75 +1,88 @@
// build.gradle.kts (Module: :app)
plugins { plugins {
// 1. Core Android and Kotlin plugins (MUST be first) alias(libs.plugins.android.application)
id("com.android.application") alias(libs.plugins.kotlin.android)
kotlin("android") alias(libs.plugins.kotlin.compose)
alias(libs.plugins.ksp)
id("org.jetbrains.kotlin.plugin.compose") // Note: No version is specified here alias(libs.plugins.hilt.android)
} }
android { android {
// 2. Android Configuration
namespace = "com.placeholder.sherpai2" namespace = "com.placeholder.sherpai2"
compileSdk = 34 compileSdk = 35
defaultConfig { defaultConfig {
applicationId = "com.placeholder.sherpai2" applicationId = "com.placeholder.sherpai2"
minSdk = 24 minSdk = 25
targetSdk = 34 targetSdk = 35
versionCode = 1 versionCode = 1
versionName = "1.0" versionName = "1.0"
testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner" testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
} }
// 3. Kotlin & Java Settings buildTypes {
compileOptions { release {
sourceCompatibility = JavaVersion.VERSION_1_8 isMinifyEnabled = false
targetCompatibility = JavaVersion.VERSION_1_8 proguardFiles(getDefaultProguardFile("proguard-android-optimize.txt"), "proguard-rules.pro")
} }
kotlinOptions { }
jvmTarget = "1.8"
compileOptions {
sourceCompatibility = JavaVersion.VERSION_11
targetCompatibility = JavaVersion.VERSION_11
}
kotlinOptions {
jvmTarget = "11"
} }
// 4. Jetpack Compose Configuration (Crucial!)
buildFeatures { buildFeatures {
compose = true compose = true
} }
composeOptions {
kotlinCompilerExtensionVersion = "1.5.8" // Must match your Kotlin version
}
} }
dependencies { dependencies {
// --- CORE ANDROID & LIFECYCLE --- // Core & Lifecycle
implementation("androidx.core:core-ktx:1.12.0") implementation(libs.androidx.core.ktx)
implementation("androidx.lifecycle:lifecycle-runtime-compose:2.7.0") implementation(libs.androidx.lifecycle.runtime.ktx)
implementation("androidx.activity:activity-compose:1.8.2") // Fixes 'activity' ref error implementation(libs.androidx.lifecycle.viewmodel.compose)
implementation(libs.androidx.activity.compose)
// --- JETPACK COMPOSE UI (Material 3) --- // Compose
implementation("androidx.compose.ui:ui") implementation(platform(libs.androidx.compose.bom))
implementation("androidx.compose.ui:ui-graphics") implementation(libs.androidx.compose.ui)
implementation("androidx.compose.ui:ui-tooling-preview") implementation(libs.androidx.compose.ui.graphics)
implementation("androidx.compose.material3:material3") // Fixes 'material3' ref error implementation(libs.androidx.compose.ui.tooling.preview)
implementation(libs.androidx.compose.material3)
implementation(libs.androidx.compose.material.icons)
debugImplementation(libs.androidx.compose.ui.tooling)
// --- COMPOSE ICONS (Fixes 'material' and 'Icons' ref errors) --- // Hilt DI
// Uses direct string to avoid Version Catalog conflicts implementation(libs.hilt.android)
implementation("androidx.compose.material:material-icons-extended:1.6.0") ksp(libs.hilt.compiler)
implementation(libs.androidx.hilt.navigation.compose)
// --- STATE MANAGEMENT / COROUTINES --- // Navigation
implementation("androidx.lifecycle:lifecycle-viewmodel-compose:2.7.0") implementation(libs.androidx.navigation.compose)
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.7.3")
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-android:1.7.3")
// --- TESTING --- // Room Database
testImplementation("junit:junit:4.13.2") implementation(libs.room.runtime)
androidTestImplementation("androidx.test.ext:junit:1.1.5") implementation(libs.room.ktx)
androidTestImplementation("androidx.test.espresso:espresso-core:3.5.1") ksp(libs.room.compiler)
androidTestImplementation("androidx.compose.ui:ui-test-junit4")
debugImplementation("androidx.compose.ui:ui-tooling")
debugImplementation("androidx.compose.ui:ui-test-manifest")
implementation("androidx.compose.foundation:foundation:1.6.0") // Use your current Compose version // Coil Images
implementation("androidx.compose.material3:material3:1.2.1") // <-- Fix/Reconfirm Material 3 implementation(libs.coil.compose)
// ML Kit
implementation(libs.mlkit.face.detection)
implementation(libs.kotlinx.coroutines.play.services)
//Face Rec
implementation(libs.tensorflow.lite)
implementation(libs.tensorflow.lite.support)
// Optional: GPU acceleration
implementation(libs.tensorflow.lite.gpu)
// Gson for storing FloatArrays in Room
implementation(libs.gson)
} }

View File

@@ -10,7 +10,8 @@
android:label="@string/app_name" android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round" android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/Theme.SherpAI2"> android:theme="@style/Theme.SherpAI2"
android:name=".SherpAIApplication">
<activity <activity
android:name=".MainActivity" android:name=".MainActivity"
android:exported="true" android:exported="true"
@@ -23,5 +24,6 @@
</intent-filter> </intent-filter>
</activity> </activity>
</application> </application>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" android:maxSdkVersion="32" />
<uses-permission android:name="android.permission.READ_MEDIA_IMAGES" />
</manifest> </manifest>

View File

@@ -1,27 +1,84 @@
package com.placeholder.sherpai2 package com.placeholder.sherpai2
import android.Manifest
import android.content.pm.PackageManager
import android.os.Build
import android.os.Bundle import android.os.Bundle
import androidx.activity.ComponentActivity import androidx.activity.ComponentActivity
import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.compose.setContent import androidx.activity.compose.setContent
import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.foundation.layout.Box
import androidx.compose.foundation.layout.fillMaxSize import androidx.compose.foundation.layout.fillMaxSize
import androidx.compose.material3.MaterialTheme import androidx.compose.material3.Text
import androidx.compose.material3.Surface import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier import androidx.compose.ui.Modifier
import com.placeholder.sherpai2.presentation.MainScreen // IMPORT your main screen import androidx.core.content.ContextCompat
import com.placeholder.sherpai2.domain.repository.ImageRepository
import com.placeholder.sherpai2.ui.presentation.MainScreen
import com.placeholder.sherpai2.ui.theme.SherpAI2Theme
import dagger.hilt.android.AndroidEntryPoint
import kotlinx.coroutines.launch
import javax.inject.Inject
@AndroidEntryPoint
class MainActivity : ComponentActivity() { class MainActivity : ComponentActivity() {
@Inject
lateinit var imageRepository: ImageRepository
override fun onCreate(savedInstanceState: Bundle?) { override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState) super.onCreate(savedInstanceState)
// Determine storage permission based on Android version
val storagePermission = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU) {
Manifest.permission.READ_MEDIA_IMAGES
} else {
@Suppress("DEPRECATION")
Manifest.permission.READ_EXTERNAL_STORAGE
}
setContent { setContent {
// Assume you have a Theme file named SherpAI2Theme (standard for new projects) SherpAI2Theme {
// Replace with your actual project theme if different var hasPermission by remember {
MaterialTheme { mutableStateOf(
Surface( ContextCompat.checkSelfPermission(this, storagePermission) ==
modifier = Modifier.fillMaxSize(), PackageManager.PERMISSION_GRANTED
color = MaterialTheme.colorScheme.background )
) { }
// Launch the main navigation UI
// Track ingestion completion
var imagesIngested by remember { mutableStateOf(false) }
// Launcher for permission request
val permissionLauncher = rememberLauncherForActivityResult(
ActivityResultContracts.RequestPermission()
) { granted ->
hasPermission = granted
}
// Trigger ingestion once permission is granted
LaunchedEffect(hasPermission) {
if (hasPermission) {
// Suspend until ingestion completes
imageRepository.ingestImages()
imagesIngested = true
} else {
permissionLauncher.launch(storagePermission)
}
}
// Gate UI until permission granted AND ingestion completed
if (hasPermission && imagesIngested) {
MainScreen() MainScreen()
} else {
Box(
modifier = Modifier.fillMaxSize(),
contentAlignment = Alignment.Center
) {
Text("Please grant storage permission to continue.")
}
} }
} }
} }

View File

@@ -0,0 +1,7 @@
package com.placeholder.sherpai2
import android.app.Application
import dagger.hilt.android.HiltAndroidApp
/**
 * Application entry point for SherpAI2.
 *
 * [HiltAndroidApp] makes Hilt generate the application-level dependency
 * container; the class is registered via android:name in the manifest.
 */
@HiltAndroidApp
class SherpAIApplication : Application()

View File

@@ -0,0 +1,51 @@
package com.placeholder.sherpai2.data.local
import androidx.room.Database
import androidx.room.RoomDatabase
import com.placeholder.sherpai2.data.local.dao.*
import com.placeholder.sherpai2.data.local.entity.*
/**
 * AppDatabase - Room database for SherpAI2 (schema version 3).
 *
 * ENTITIES:
 * - Gallery core: ImageEntity, TagEntity, EventEntity plus the
 *   ImageTag / ImagePerson / ImageEvent junction tables.
 * - Face recognition: PersonEntity (people), FaceModelEntity (face
 *   embeddings, linked to PersonEntity), PhotoFaceTagEntity (face
 *   detections linking images to face models).
 *
 * exportSchema is false, so no schema JSON is written at build time.
 */
@Database(
entities = [
// ===== Gallery core =====
ImageEntity::class,
TagEntity::class,
EventEntity::class,
ImageTagEntity::class,
ImagePersonEntity::class,
ImageEventEntity::class,
// ===== Face recognition =====
PersonEntity::class, // People
FaceModelEntity::class, // Face embeddings
PhotoFaceTagEntity::class // Face tags
],
version = 3,
exportSchema = false
)
// No TypeConverters registered: embeddings are stored as plain strings.
abstract class AppDatabase : RoomDatabase() {
// ===== Gallery DAOs =====
abstract fun imageDao(): ImageDao
abstract fun tagDao(): TagDao
abstract fun eventDao(): EventDao
abstract fun imageTagDao(): ImageTagDao
abstract fun imagePersonDao(): ImagePersonDao
abstract fun imageEventDao(): ImageEventDao
abstract fun imageAggregateDao(): ImageAggregateDao
// ===== Face recognition DAOs =====
abstract fun personDao(): PersonDao // Manage people
abstract fun faceModelDao(): FaceModelDao // Manage face embeddings
abstract fun photoFaceTagDao(): PhotoFaceTagDao // Manage face tags
}

View File

@@ -0,0 +1,26 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.placeholder.sherpai2.data.local.entity.EventEntity
/**
 * DAO for [EventEntity] rows (table: events).
 */
@Dao
interface EventDao {
// REPLACE: re-inserting an event with an existing eventId overwrites it.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insert(event: EventEntity)
/**
 * Find events whose inclusive [startDate, endDate] range covers the
 * given timestamp, excluding hidden events (isHidden = 0).
 *
 * This is the backbone of auto-tagging by date.
 */
@Query("""
SELECT * FROM events
WHERE :timestamp BETWEEN startDate AND endDate
AND isHidden = 0
""")
suspend fun findEventsForTimestamp(timestamp: Long): List<EventEntity>
}

View File

@@ -0,0 +1,44 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.*
import kotlinx.coroutines.flow.Flow
import com.placeholder.sherpai2.data.local.entity.FaceModelEntity
/**
 * FaceModelDao - Manages face recognition models (table: face_models).
 *
 * PRIMARY KEY TYPE: String (UUID)
 * FOREIGN KEY: personId (String) -> persons.id
 */
@Dao
interface FaceModelDao {
// REPLACE overwrites a model that already has this id.
// Returns the SQLite rowid (Long) even though the primary key is a String.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insertFaceModel(faceModel: FaceModelEntity): Long // Row ID
@Update
suspend fun updateFaceModel(faceModel: FaceModelEntity)
// Touch lastUsed, e.g. after a recognition run.
@Query("UPDATE face_models SET lastUsed = :timestamp WHERE id = :faceModelId")
suspend fun updateLastUsed(faceModelId: String, timestamp: Long)
@Query("SELECT * FROM face_models WHERE id = :faceModelId")
suspend fun getFaceModelById(faceModelId: String): FaceModelEntity?
// Single result: face_models has a unique index on personId, so at most
// one model exists per person.
@Query("SELECT * FROM face_models WHERE personId = :personId AND isActive = 1")
suspend fun getFaceModelByPersonId(personId: String): FaceModelEntity?
@Query("SELECT * FROM face_models WHERE isActive = 1 ORDER BY lastUsed DESC")
suspend fun getAllActiveFaceModels(): List<FaceModelEntity>
// Flow variant of getAllActiveFaceModels for reactive UI updates.
@Query("SELECT * FROM face_models WHERE isActive = 1 ORDER BY lastUsed DESC")
fun getAllActiveFaceModelsFlow(): Flow<List<FaceModelEntity>>
@Query("DELETE FROM face_models WHERE id = :faceModelId")
suspend fun deleteFaceModelById(faceModelId: String)
// Soft delete: keeps the row but excludes it from the *Active* queries above.
@Query("UPDATE face_models SET isActive = 0 WHERE id = :faceModelId")
suspend fun deactivateFaceModel(faceModelId: String)
@Query("SELECT COUNT(*) FROM face_models WHERE isActive = 1")
suspend fun getActiveFaceModelCount(): Int
}

View File

@@ -0,0 +1,48 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Query
import androidx.room.Transaction
import com.placeholder.sherpai2.data.local.model.ImageWithEverything
import kotlinx.coroutines.flow.Flow
/**
 * Read-only DAO that hydrates images together with their relations
 * ([ImageWithEverything]). @Transaction ensures the relation sub-queries
 * read a consistent snapshot.
 */
@Dao
interface ImageAggregateDao {
/**
 * Observe a fully-hydrated image object.
 *
 * NOTE(review): the Flow's element type is non-nullable; if no row
 * matches imageId the emitted value cannot be represented — consider
 * Flow<ImageWithEverything?>. Confirm against callers.
 */
@Transaction
@Query("""
SELECT * FROM images
WHERE imageId = :imageId
""")
fun observeImageWithEverything(
imageId: String
): Flow<ImageWithEverything>
/**
 * Observe all images, newest capturedAt first.
 */
@Transaction
@Query("""
SELECT * FROM images
ORDER BY capturedAt DESC
""")
fun observeAllImagesWithEverything(): Flow<List<ImageWithEverything>>
/**
 * Observe images filtered by tag value.
 *
 * Joins images -> image_tags -> tags via the junction table.
 */
@Transaction
@Query("""
SELECT images.* FROM images
INNER JOIN image_tags ON images.imageId = image_tags.imageId
INNER JOIN tags ON tags.tagId = image_tags.tagId
WHERE tags.value = :tag
ORDER BY images.capturedAt DESC
""")
fun observeImagesWithTag(tag: String): Flow<List<ImageWithEverything>>
}

View File

@@ -0,0 +1,75 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import androidx.room.Transaction
import com.placeholder.sherpai2.data.local.entity.ImageEntity
import com.placeholder.sherpai2.data.local.model.ImageWithEverything
import kotlinx.coroutines.flow.Flow
/**
 * DAO for [ImageEntity] rows (table: images).
 */
@Dao
interface ImageDao {
/**
 * Bulk-insert images.
 *
 * IGNORE silently skips rows that violate a constraint — presumably a
 * unique index on sha256/imageUri declared on ImageEntity (not visible
 * here; confirm) — which makes re-ingestion idempotent.
 */
@Insert(onConflict = OnConflictStrategy.IGNORE)
suspend fun insertImages(images: List<ImageEntity>)
/**
 * Get a single image by ID, or null if absent.
 */
@Query("SELECT * FROM images WHERE imageId = :imageId")
suspend fun getImageById(imageId: String): ImageEntity?
/**
 * Stream images ordered by capture time (newest first).
 *
 * Flow is critical:
 * - UI auto-updates on any table change
 * - No manual refresh
 */
@Query("""
SELECT * FROM images
ORDER BY capturedAt DESC
""")
fun observeAllImages(): Flow<List<ImageEntity>>
/**
 * Fetch images captured in [start, end] inclusive, oldest first.
 * Used for:
 * - event auto-assignment
 * - timeline views
 */
@Query("""
SELECT * FROM images
WHERE capturedAt BETWEEN :start AND :end
ORDER BY capturedAt ASC
""")
suspend fun getImagesInRange(
start: Long,
end: Long
): List<ImageEntity>
// Newest `limit` images with relations hydrated; @Transaction keeps the
// relation sub-queries consistent.
@Transaction
@Query("SELECT * FROM images ORDER BY capturedAt DESC LIMIT :limit")
fun getRecentImages(limit: Int): Flow<List<ImageWithEverything>>
// True if an image with this content hash was already ingested.
@Query("SELECT COUNT(*) > 0 FROM images WHERE sha256 = :sha256")
suspend fun existsBySha256(sha256: String): Boolean
// Single-row variant of insertImages; same IGNORE semantics.
@Insert(onConflict = OnConflictStrategy.IGNORE)
suspend fun insert(image: ImageEntity)
/**
 * Get images by list of IDs.
 * List<String> matches ImageEntity.imageId's String type.
 */
@Query("SELECT * FROM images WHERE imageId IN (:imageIds)")
suspend fun getImagesByIds(imageIds: List<String>): List<ImageEntity>
}

View File

@@ -0,0 +1,23 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.placeholder.sherpai2.data.local.entity.ImageEventEntity
/**
 * DAO for the image<->event junction table (image_events).
 */
@Dao
interface ImageEventDao {
// REPLACE: re-upserting the same association overwrites it.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun upsert(entity: ImageEventEntity)
/**
 * IDs of all images associated with an event.
 */
@Query("""
SELECT imageId FROM image_events
WHERE eventId = :eventId
""")
suspend fun findImagesForEvent(eventId: String): List<String>
}

View File

@@ -0,0 +1,25 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.placeholder.sherpai2.data.local.entity.ImagePersonEntity
/**
 * DAO for the image<->person junction table (image_persons).
 */
@Dao
interface ImagePersonDao {
// REPLACE: re-upserting the same association overwrites it.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun upsert(entity: ImagePersonEntity)
/**
 * IDs of all images containing a specific person.
 * Restricted to PUBLIC, user-confirmed associations.
 */
@Query("""
SELECT imageId FROM image_persons
WHERE personId = :personId
AND visibility = 'PUBLIC'
AND confirmed = 1
""")
suspend fun findImagesForPerson(personId: String): List<String>
}

View File

@@ -0,0 +1,53 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import androidx.room.Transaction
import com.placeholder.sherpai2.data.local.entity.ImageTagEntity
import com.placeholder.sherpai2.data.local.entity.TagEntity
import kotlinx.coroutines.flow.Flow
/**
 * DAO for the image<->tag junction table (image_tags).
 */
@Dao
interface ImageTagDao {
// REPLACE: re-upserting the same association overwrites it.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun upsert(imageTag: ImageTagEntity)
/**
 * Observe the tag associations of an image, excluding HIDDEN ones.
 */
@Query("""
SELECT * FROM image_tags
WHERE imageId = :imageId
AND visibility != 'HIDDEN'
""")
fun observeTagsForImage(imageId: String): Flow<List<ImageTagEntity>>
/**
 * Find image IDs carrying a tag — the primary tag-search query.
 *
 * Only PUBLIC associations at or above [minConfidence] (default 0.5)
 * are returned.
 */
@Query("""
SELECT imageId FROM image_tags
WHERE tagId = :tagId
AND visibility = 'PUBLIC'
AND confidence >= :minConfidence
""")
suspend fun findImagesByTag(
tagId: String,
minConfidence: Float = 0.5f
): List<String>
// Full TagEntity rows for an image (PUBLIC associations only).
@Transaction
@Query("""
SELECT t.*
FROM tags t
INNER JOIN image_tags it ON t.tagId = it.tagId
WHERE it.imageId = :imageId AND it.visibility = 'PUBLIC'
""")
fun getTagsForImage(imageId: String): Flow<List<TagEntity>>
}

View File

@@ -0,0 +1,53 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.*
import com.placeholder.sherpai2.data.local.entity.PersonEntity
import kotlinx.coroutines.flow.Flow
/**
 * PersonDao - Data access for [PersonEntity] (table: persons).
 *
 * PRIMARY KEY TYPE: String (UUID)
 */
@Dao
interface PersonDao {
// REPLACE overwrites an existing person with the same id.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insert(person: PersonEntity): Long // Room still returns row ID as Long
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insertAll(persons: List<PersonEntity>)
@Update
suspend fun update(person: PersonEntity)
// Touch updatedAt; timestamp defaults to "now".
@Query("UPDATE persons SET updatedAt = :timestamp WHERE id = :personId")
suspend fun updateTimestamp(personId: String, timestamp: Long = System.currentTimeMillis())
@Delete
suspend fun delete(person: PersonEntity)
@Query("DELETE FROM persons WHERE id = :personId")
suspend fun deleteById(personId: String)
@Query("SELECT * FROM persons WHERE id = :personId")
suspend fun getPersonById(personId: String): PersonEntity?
@Query("SELECT * FROM persons WHERE id IN (:personIds)")
suspend fun getPersonsByIds(personIds: List<String>): List<PersonEntity>
@Query("SELECT * FROM persons ORDER BY name ASC")
suspend fun getAllPersons(): List<PersonEntity>
// Flow variant of getAllPersons for reactive UI updates.
@Query("SELECT * FROM persons ORDER BY name ASC")
fun getAllPersonsFlow(): Flow<List<PersonEntity>>
// Substring search on name ('%query%'), sorted alphabetically.
@Query("SELECT * FROM persons WHERE name LIKE '%' || :query || '%' ORDER BY name ASC")
suspend fun searchByName(query: String): List<PersonEntity>
@Query("SELECT COUNT(*) FROM persons")
suspend fun getPersonCount(): Int
@Query("SELECT EXISTS(SELECT 1 FROM persons WHERE id = :personId)")
suspend fun personExists(personId: String): Boolean
}

View File

@@ -0,0 +1,91 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.*
import kotlinx.coroutines.flow.Flow
import com.placeholder.sherpai2.data.local.entity.PhotoFaceTagEntity
/**
 * PhotoFaceTagDao - Manages face tags in photos (table: photo_face_tags).
 *
 * PRIMARY KEY TYPE: String (UUID)
 * FOREIGN KEYS: imageId (String), faceModelId (String)
 */
@Dao
interface PhotoFaceTagDao {
// REPLACE overwrites an existing tag with the same id.
// Returns the SQLite rowid (Long) even though the primary key is a String.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insertTag(tag: PhotoFaceTagEntity): Long // Row ID
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun insertTags(tags: List<PhotoFaceTagEntity>)
@Update
suspend fun updateTag(tag: PhotoFaceTagEntity)
// Mark a detection as user-confirmed; timestamp defaults to "now".
@Query("UPDATE photo_face_tags SET verifiedByUser = 1, verifiedAt = :timestamp WHERE id = :tagId")
suspend fun markTagAsVerified(tagId: String, timestamp: Long = System.currentTimeMillis())
// ===== QUERY BY IMAGE =====
@Query("SELECT * FROM photo_face_tags WHERE imageId = :imageId")
suspend fun getTagsForImage(imageId: String): List<PhotoFaceTagEntity>
@Query("SELECT COUNT(*) FROM photo_face_tags WHERE imageId = :imageId")
suspend fun getFaceCountForImage(imageId: String): Int
@Query("SELECT EXISTS(SELECT 1 FROM photo_face_tags WHERE imageId = :imageId AND faceModelId = :faceModelId)")
suspend fun imageHasPerson(imageId: String, faceModelId: String): Boolean
// ===== QUERY BY FACE MODEL =====
@Query("SELECT DISTINCT imageId FROM photo_face_tags WHERE faceModelId = :faceModelId ORDER BY detectedAt DESC")
suspend fun getImageIdsForFaceModel(faceModelId: String): List<String>
// Flow variant of getImageIdsForFaceModel for reactive UI updates.
@Query("SELECT DISTINCT imageId FROM photo_face_tags WHERE faceModelId = :faceModelId ORDER BY detectedAt DESC")
fun getImageIdsForFaceModelFlow(faceModelId: String): Flow<List<String>>
// Distinct-photo count per model, projected into FaceModelPhotoCount.
@Query("SELECT faceModelId, COUNT(DISTINCT imageId) as photoCount FROM photo_face_tags GROUP BY faceModelId")
suspend fun getPhotoCountPerFaceModel(): List<FaceModelPhotoCount>
@Query("SELECT * FROM photo_face_tags WHERE faceModelId = :faceModelId ORDER BY detectedAt DESC")
suspend fun getAllTagsForFaceModel(faceModelId: String): List<PhotoFaceTagEntity>
// ===== DELETE =====
@Delete
suspend fun deleteTag(tag: PhotoFaceTagEntity)
@Query("DELETE FROM photo_face_tags WHERE id = :tagId")
suspend fun deleteTagById(tagId: String)
@Query("DELETE FROM photo_face_tags WHERE faceModelId = :faceModelId")
suspend fun deleteTagsForFaceModel(faceModelId: String)
@Query("DELETE FROM photo_face_tags WHERE imageId = :imageId")
suspend fun deleteTagsForImage(imageId: String)
// ===== STATISTICS =====
// Detections below the given confidence, least confident first.
@Query("SELECT * FROM photo_face_tags WHERE confidence < :threshold ORDER BY confidence ASC")
suspend fun getLowConfidenceTags(threshold: Float = 0.7f): List<PhotoFaceTagEntity>
@Query("SELECT * FROM photo_face_tags WHERE verifiedByUser = 0 ORDER BY detectedAt DESC")
suspend fun getUnverifiedTags(): List<PhotoFaceTagEntity>
@Query("SELECT COUNT(*) FROM photo_face_tags WHERE verifiedByUser = 0")
suspend fun getUnverifiedTagCount(): Int
// Nullable: AVG over zero rows is SQL NULL.
@Query("SELECT AVG(confidence) FROM photo_face_tags WHERE faceModelId = :faceModelId")
suspend fun getAverageConfidenceForFaceModel(faceModelId: String): Float?
@Query("SELECT * FROM photo_face_tags ORDER BY detectedAt DESC LIMIT :limit")
suspend fun getRecentlyDetectedFaces(limit: Int = 20): List<PhotoFaceTagEntity>
}
/**
 * Projection row for getPhotoCountPerFaceModel: how many distinct photos
 * each face model appears in.
 */
data class FaceModelPhotoCount(
val faceModelId: String,
val photoCount: Int
)

View File

@@ -0,0 +1,24 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.placeholder.sherpai2.data.local.entity.TagEntity
/**
 * DAO for [TagEntity] rows (table: tags).
 */
@Dao
interface TagDao {
// IGNORE skips the insert when it would violate a constraint —
// presumably a unique index on value (confirm in TagEntity).
@Insert(onConflict = OnConflictStrategy.IGNORE)
suspend fun insert(tag: TagEntity)
/**
 * Resolve a tag by value (e.g. "park"); null if no such tag exists.
 */
@Query("SELECT * FROM tags WHERE value = :value LIMIT 1")
suspend fun getByValue(value: String): TagEntity?
@Query("SELECT * FROM tags")
suspend fun getAll(): List<TagEntity>
}

View File

@@ -0,0 +1,44 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.Index
import androidx.room.PrimaryKey
/**
 * Represents a meaningful event spanning a time range.
 *
 * Events allow auto-association of images by timestamp; startDate and
 * endDate are indexed to keep those range lookups fast.
 */
@Entity(
tableName = "events",
indices = [
Index(value = ["startDate"]),
Index(value = ["endDate"])
]
)
data class EventEntity(
@PrimaryKey
val eventId: String,
val name: String,
/**
 * Inclusive start date (UTC millis).
 */
val startDate: Long,
/**
 * Inclusive end date (UTC millis).
 */
val endDate: Long,
val location: String?,
/**
 * Importance weight in the range 0.0..1.0.
 */
val importance: Float,
// When true the event is excluded from timestamp lookups (see EventDao).
val isHidden: Boolean
)

View File

@@ -0,0 +1,155 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.ForeignKey
import androidx.room.Index
import androidx.room.PrimaryKey
import java.util.UUID
/**
 * PersonEntity - Represents a person in the face recognition system.
 *
 * TABLE: persons
 * PRIMARY KEY: id (String UUID, generated at construction time)
 * The name column is indexed for sorted listing and substring search.
 */
@Entity(
tableName = "persons",
indices = [
Index(value = ["name"])
]
)
data class PersonEntity(
@PrimaryKey
val id: String = UUID.randomUUID().toString(),
val name: String,
// Epoch millis; updatedAt is refreshed via PersonDao.updateTimestamp.
val createdAt: Long = System.currentTimeMillis(),
val updatedAt: Long = System.currentTimeMillis()
)
/**
 * FaceModelEntity - Stores the face recognition model (serialized
 * embedding) for a person.
 *
 * TABLE: face_models
 * FOREIGN KEY: personId -> persons.id (CASCADE delete)
 * The unique index on personId enforces at most one model per person.
 */
@Entity(
    tableName = "face_models",
    foreignKeys = [
        ForeignKey(
            entity = PersonEntity::class,
            parentColumns = ["id"],
            childColumns = ["personId"],
            onDelete = ForeignKey.CASCADE
        )
    ],
    indices = [
        Index(value = ["personId"], unique = true)
    ]
)
data class FaceModelEntity(
    @PrimaryKey
    val id: String = UUID.randomUUID().toString(),
    val personId: String,
    val embedding: String,           // FloatArray serialized as comma-separated text
    val trainingImageCount: Int,     // photos used to build this embedding
    val averageConfidence: Float,    // mean confidence across training photos
    val createdAt: Long = System.currentTimeMillis(),
    val updatedAt: Long = System.currentTimeMillis(),
    val lastUsed: Long? = null,      // last recognition use; null = never used
    val isActive: Boolean = true     // soft-delete / deactivation flag
) {
    companion object {
        /**
         * Build an entity from a raw embedding vector.
         *
         * The vector is serialized with joinToString(",") and can be
         * recovered with [getEmbeddingArray].
         */
        fun create(
            personId: String,
            embeddingArray: FloatArray,
            trainingImageCount: Int,
            averageConfidence: Float
        ): FaceModelEntity {
            return FaceModelEntity(
                personId = personId,
                embedding = embeddingArray.joinToString(","),
                trainingImageCount = trainingImageCount,
                averageConfidence = averageConfidence
            )
        }
    }
    /**
     * Deserialize [embedding] back into a FloatArray.
     *
     * FIX: an empty vector serializes to "", and "".split(",") yields
     * [""], so the previous implementation threw NumberFormatException
     * on "".toFloat(). Return an empty array for that case instead.
     *
     * @throws NumberFormatException if any non-empty component is not a
     *         valid float literal.
     */
    fun getEmbeddingArray(): FloatArray {
        if (embedding.isEmpty()) return FloatArray(0)
        return embedding.split(",").map { it.toFloat() }.toFloatArray()
    }
}
/**
 * PhotoFaceTagEntity - Links detected faces in photos to person models
 *
 * TABLE: photo_face_tags
 * FOREIGN KEYS:
 * - imageId → images.imageId (String)
 * - faceModelId → face_models.id (String)
 * Both cascade on delete, so tags disappear with their image or face model.
 */
@Entity(
tableName = "photo_face_tags",
foreignKeys = [
ForeignKey(
entity = ImageEntity::class,
parentColumns = ["imageId"],
childColumns = ["imageId"],
onDelete = ForeignKey.CASCADE
),
ForeignKey(
entity = FaceModelEntity::class,
parentColumns = ["id"],
childColumns = ["faceModelId"],
onDelete = ForeignKey.CASCADE
)
],
// NOTE(review): the composite (imageId, faceModelId) index makes the single
// Index(["imageId"]) redundant (same leading column) — consider dropping it.
indices = [
Index(value = ["imageId"]),
Index(value = ["faceModelId"]),
Index(value = ["imageId", "faceModelId"])
]
)
data class PhotoFaceTagEntity(
@PrimaryKey
val id: String = UUID.randomUUID().toString(),
val imageId: String, // String to match ImageEntity.imageId
val faceModelId: String,
val boundingBox: String, // "left,top,right,bottom"
val confidence: Float,
val embedding: String, // Serialized FloatArray ("v0,v1,...")
val detectedAt: Long = System.currentTimeMillis(),
val verifiedByUser: Boolean = false,
val verifiedAt: Long? = null
) {
companion object {
// Factory that serializes the Rect and embedding into their text columns.
fun create(
imageId: String,
faceModelId: String,
boundingBox: android.graphics.Rect,
confidence: Float,
faceEmbedding: FloatArray
): PhotoFaceTagEntity {
return PhotoFaceTagEntity(
imageId = imageId,
faceModelId = faceModelId,
boundingBox = "${boundingBox.left},${boundingBox.top},${boundingBox.right},${boundingBox.bottom}",
confidence = confidence,
embedding = faceEmbedding.joinToString(",")
)
}
}
// Parse the "l,t,r,b" column back into a Rect.
// Assumes exactly four comma-separated ints (guaranteed by create()); a
// malformed row throws NumberFormatException/IndexOutOfBoundsException.
fun getBoundingBox(): android.graphics.Rect {
val parts = boundingBox.split(",").map { it.toInt() }
return android.graphics.Rect(parts[0], parts[1], parts[2], parts[3])
}
// Deserialize the stored embedding.
// NOTE(review): throws NumberFormatException if embedding is empty — callers
// must only pass rows produced by create() with a non-empty array.
fun getEmbeddingArray(): FloatArray {
return embedding.split(",").map { it.toFloat() }.toFloatArray()
}
}

View File

@@ -0,0 +1,55 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.Index
import androidx.room.PrimaryKey
/**
 * Represents a single image on the device.
 *
 * This entity is intentionally immutable:
 * - imageUri identifies where the image lives
 * - sha256 prevents duplicates (unique index)
 * - capturedAt is the EXIF timestamp
 *
 * This table should be append-only: rows are inserted during ingestion and
 * never mutated; all derived data lives in the join tables.
 */
@Entity(
tableName = "images",
indices = [
Index(value = ["imageUri"], unique = true),
Index(value = ["sha256"], unique = true),
Index(value = ["capturedAt"])
]
)
data class ImageEntity(
@PrimaryKey
val imageId: String,
// MediaStore content URI, stored as text; unique per row.
val imageUri: String,
/**
 * Cryptographic hash of image bytes (hex-encoded SHA-256).
 * Used for deduplication and re-indexing.
 */
val sha256: String,
/**
 * EXIF timestamp (UTC millis).
 */
val capturedAt: Long,
/**
 * When this image was indexed into the app (UTC millis).
 */
val ingestedAt: Long,
val width: Int,
val height: Int,
/**
 * CAMERA | SCREENSHOT | IMPORTED
 */
val source: String
)

View File

@@ -0,0 +1,42 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.ForeignKey
import androidx.room.Index
// Join table linking images to events (many-to-many).
// Composite primary key (imageId, eventId); the extra eventId index supports
// "all images for an event" lookups (imageId is covered by the PK prefix).
@Entity(
tableName = "image_events",
primaryKeys = ["imageId", "eventId"],
foreignKeys = [
ForeignKey(
entity = ImageEntity::class,
parentColumns = ["imageId"],
childColumns = ["imageId"],
onDelete = ForeignKey.CASCADE
),
ForeignKey(
entity = EventEntity::class,
parentColumns = ["eventId"],
childColumns = ["eventId"],
onDelete = ForeignKey.CASCADE
)
],
indices = [
Index("eventId")
]
)
data class ImageEventEntity(
val imageId: String,
val eventId: String,
/**
 * AUTO | MANUAL — how this link was created.
 */
val source: String,
/**
 * User override flag. (`override` is a Kotlin soft keyword, so it is a
 * legal property name here.)
 */
val override: Boolean
)

View File

@@ -0,0 +1,40 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.ForeignKey
import androidx.room.Index
// Join table linking images to persons (many-to-many).
// NOTE(review): the FK targets PersonEntity.parentColumns ["id"] — two
// PersonEntity declarations exist in this package (String id vs Long id);
// confirm which one is registered so this FK resolves correctly.
@Entity(
tableName = "image_persons",
primaryKeys = ["imageId", "personId"],
foreignKeys = [
ForeignKey(
entity = ImageEntity::class,
parentColumns = ["imageId"],
childColumns = ["imageId"],
onDelete = ForeignKey.CASCADE
),
ForeignKey(
entity = PersonEntity::class,
parentColumns = ["id"],
childColumns = ["personId"],
onDelete = ForeignKey.CASCADE
)
],
indices = [
Index("personId")
]
)
data class ImagePersonEntity(
val imageId: String,
val personId: String,
// Recognition confidence for this association.
val confidence: Float,
// True once a user has confirmed the association.
val confirmed: Boolean,
/**
 * PUBLIC | PRIVATE
 */
val visibility: String
)

View File

@@ -0,0 +1,56 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.ForeignKey
import androidx.room.Index
/**
 * Join table linking images to tags.
 *
 * This is NOT optional.
 * Do not inline tag lists on ImageEntity — tags are normalized rows in the
 * tags table and referenced here by id.
 */
@Entity(
tableName = "image_tags",
primaryKeys = ["imageId", "tagId"],
foreignKeys = [
ForeignKey(
entity = ImageEntity::class,
parentColumns = ["imageId"],
childColumns = ["imageId"],
onDelete = ForeignKey.CASCADE
),
ForeignKey(
entity = TagEntity::class,
parentColumns = ["tagId"],
childColumns = ["tagId"],
onDelete = ForeignKey.CASCADE
)
],
// NOTE(review): Index("imageId") duplicates the composite PK's leading
// column — likely removable.
indices = [
Index("tagId"),
Index("imageId")
]
)
data class ImageTagEntity(
val imageId: String,
val tagId: String,
/**
 * AUTO | MANUAL — whether ML or the user attached this tag.
 */
val source: String,
/**
 * ML confidence in the range 0–1 (manual tags use 1.0).
 */
val confidence: Float,
/**
 * PUBLIC | PRIVATE | HIDDEN — HIDDEN acts as a soft delete.
 */
val visibility: String,
val createdAt: Long
)

View File

@@ -0,0 +1,49 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.PrimaryKey
/**
 * PersonEntity - Represents a person in your app
 *
 * This is a SIMPLE person entity for your existing database.
 * Face embeddings are stored separately in FaceModelEntity.
 *
 * ARCHITECTURE:
 * - PersonEntity = Human data (name, birthday, etc.)
 * - FaceModelEntity = AI data (face embeddings) - links to this via personId
 *
 * NOTE(review): this class collides with the other `PersonEntity` in this
 * package (String UUID id, indexed name) — same fully-qualified name AND the
 * same "persons" table. The project cannot compile with both; the rest of
 * the code (FaceRecognitionRepository, ImagePersonEntity FK on "id") appears
 * written against the String-id version. Delete one of the two.
 *
 * You can add more fields as needed:
 * - birthday: Long?
 * - phoneNumber: String?
 * - email: String?
 * - notes: String?
 * - etc.
 */
@Entity(tableName = "persons")
data class PersonEntity(
@PrimaryKey(autoGenerate = true)
val id: Long = 0,
/**
 * Person's name
 */
val name: String,
/**
 * When this person was added (UTC millis)
 */
val createdAt: Long = System.currentTimeMillis(),
/**
 * Last time this person's data was updated (UTC millis)
 */
val updatedAt: Long = System.currentTimeMillis()
// ADD MORE FIELDS AS NEEDED:
// val birthday: Long? = null,
// val phoneNumber: String? = null,
// val email: String? = null,
// val profilePhotoUri: String? = null,
// val notes: String? = null
)

View File

@@ -0,0 +1,30 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.PrimaryKey
/**
 * Represents a conceptual tag.
 *
 * Tags are normalized so that:
 * - "park" exists once
 * - many images can reference it via the image_tags join table
 */
@Entity(tableName = "tags")
data class TagEntity(
@PrimaryKey
val tagId: String,
/**
 * GENERIC | SYSTEM | HIDDEN
 */
val type: String,
/**
 * Human-readable value, e.g. "park", "sunset".
 * Stored lowercase/trimmed by the tagging repository.
 */
val value: String,
// Creation time (UTC millis).
val createdAt: Long
)

View File

@@ -0,0 +1,29 @@
package com.placeholder.sherpai2.data.local.model
import androidx.room.Embedded
import androidx.room.Relation
import com.placeholder.sherpai2.data.local.entity.*
/**
 * Fully-hydrated image graph: the image plus its tag/person/event join rows.
 *
 * Note: the relations target the join entities themselves (ImageTagEntity,
 * etc.), not the resolved TagEntity/PersonEntity/EventEntity rows — callers
 * resolve those separately when needed.
 */
data class ImageWithEverything(
@Embedded
val image: ImageEntity,
@Relation(
parentColumn = "imageId",
entityColumn = "imageId"
)
val tags: List<ImageTagEntity>,
@Relation(
parentColumn = "imageId",
entityColumn = "imageId"
)
val persons: List<ImagePersonEntity>,
@Relation(
parentColumn = "imageId",
entityColumn = "imageId"
)
val events: List<ImageEventEntity>
)

View File

@@ -0,0 +1,18 @@
package com.placeholder.sherpai2.data.local.model
import androidx.room.Embedded
import androidx.room.Relation
import com.placeholder.sherpai2.data.local.entity.ImageEntity
import com.placeholder.sherpai2.data.local.entity.ImageTagEntity
/**
 * An image together with its raw image_tags join rows.
 * Lighter-weight alternative to ImageWithEverything for tag-only screens.
 */
data class ImageWithTags(
@Embedded
val image: ImageEntity,
@Relation(
parentColumn = "imageId",
entityColumn = "imageId"
)
val tags: List<ImageTagEntity>
)

View File

@@ -0,0 +1,357 @@
package com.placeholder.sherpai2.data.repository
import android.content.Context
import android.graphics.Bitmap
import com.placeholder.sherpai2.data.local.dao.FaceModelDao
import com.placeholder.sherpai2.data.local.dao.ImageDao
import com.placeholder.sherpai2.data.local.dao.PersonDao
import com.placeholder.sherpai2.data.local.dao.PhotoFaceTagDao
import com.placeholder.sherpai2.data.local.entity.*
import com.placeholder.sherpai2.ml.FaceNetModel
import com.placeholder.sherpai2.ui.trainingprep.TrainingSanityChecker
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.emitAll
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.map
import kotlinx.coroutines.withContext
import javax.inject.Inject
import javax.inject.Singleton
/**
 * FaceRecognitionRepository - Complete face recognition system.
 *
 * ALL IDs ARE STRINGS, MATCHING THE SCHEMA:
 * - PersonEntity.id: String (UUID)
 * - ImageEntity.imageId: String
 * - FaceModelEntity.id: String (UUID)
 * - PhotoFaceTagEntity.id: String (UUID)
 */
@Singleton
class FaceRecognitionRepository @Inject constructor(
    private val context: Context,
    private val personDao: PersonDao,
    private val imageDao: ImageDao,
    private val faceModelDao: FaceModelDao,
    private val photoFaceTagDao: PhotoFaceTagDao
) {

    // Lazy so the TFLite interpreter is only loaded when first needed.
    private val faceNetModel by lazy { FaceNetModel(context) }

    // ======================
    // TRAINING OPERATIONS
    // ======================

    /**
     * Create a new person and train their face model in one operation.
     *
     * NOTE(review): the person row is inserted before training; if training
     * throws, a person without a face model remains — confirm that is OK.
     *
     * @return the new person's id (String UUID)
     */
    suspend fun createPersonWithFaceModel(
        personName: String,
        validImages: List<TrainingSanityChecker.ValidTrainingImage>,
        onProgress: (Int, Int) -> Unit = { _, _ -> }
    ): String = withContext(Dispatchers.IO) {
        val person = PersonEntity(name = personName)
        personDao.insert(person)
        trainPerson(
            personId = person.id,
            validImages = validImages,
            onProgress = onProgress
        )
        person.id
    }

    /**
     * Train a face recognition model for an existing person.
     *
     * Generates one embedding per training crop, folds them into a single
     * person model, and stores the mean self-similarity as the model's
     * average confidence.
     *
     * @param personId String UUID of an existing person
     * @return id of the stored face model (String UUID)
     * @throws IllegalArgumentException when the person does not exist
     */
    suspend fun trainPerson(
        personId: String,
        validImages: List<TrainingSanityChecker.ValidTrainingImage>,
        onProgress: (Int, Int) -> Unit = { _, _ -> }
    ): String = withContext(Dispatchers.Default) {
        val person = personDao.getPersonById(personId)
            ?: throw IllegalArgumentException("Person with ID $personId not found")
        val embeddings = faceNetModel.generateEmbeddingsBatch(
            faceBitmaps = validImages.map { it.croppedFaceBitmap },
            onProgress = onProgress
        )
        val personEmbedding = faceNetModel.createPersonModel(embeddings)
        // Self-similarity of each training embedding vs the aggregate model.
        val confidences = embeddings.map { embedding ->
            faceNetModel.calculateSimilarity(personEmbedding, embedding)
        }
        val avgConfidence = confidences.average().toFloat()
        val faceModel = FaceModelEntity.create(
            personId = personId,
            embeddingArray = personEmbedding,
            trainingImageCount = validImages.size,
            averageConfidence = avgConfidence
        )
        faceModelDao.insertFaceModel(faceModel)
        faceModel.id
    }

    /**
     * Retrain a face model with additional images, preserving its id and
     * createdAt timestamp.
     *
     * NOTE(review): the existing aggregate embedding is averaged in as if it
     * were a single sample, so prior training data is progressively
     * down-weighted as more images are added — confirm this is intended.
     */
    suspend fun retrainFaceModel(
        faceModelId: String,
        newFaceImages: List<Bitmap>
    ) = withContext(Dispatchers.Default) {
        val faceModel = faceModelDao.getFaceModelById(faceModelId)
            ?: throw IllegalArgumentException("Face model $faceModelId not found")
        val existingEmbedding = faceModel.getEmbeddingArray()
        val newEmbeddings = faceNetModel.generateEmbeddingsBatch(newFaceImages)
        val allEmbeddings = listOf(existingEmbedding) + newEmbeddings
        val updatedEmbedding = faceNetModel.createPersonModel(allEmbeddings)
        val confidences = allEmbeddings.map { embedding ->
            faceNetModel.calculateSimilarity(updatedEmbedding, embedding)
        }
        val avgConfidence = confidences.average().toFloat()
        faceModelDao.updateFaceModel(
            FaceModelEntity.create(
                personId = faceModel.personId,
                embeddingArray = updatedEmbedding,
                trainingImageCount = faceModel.trainingImageCount + newFaceImages.size,
                averageConfidence = avgConfidence
            ).copy(
                id = faceModelId,
                createdAt = faceModel.createdAt,
                updatedAt = System.currentTimeMillis()
            )
        )
    }

    // ======================
    // SCANNING / RECOGNITION
    // ======================

    /**
     * Scan an image's detected faces against all active face models and
     * persist a tag for every face whose best similarity is strictly greater
     * than [threshold].
     *
     * @param imageId ImageEntity.imageId of the scanned image
     * @return the tags that were inserted (possibly empty)
     */
    suspend fun scanImage(
        imageId: String,
        detectedFaces: List<DetectedFace>,
        threshold: Float = FaceNetModel.SIMILARITY_THRESHOLD_HIGH
    ): List<PhotoFaceTagEntity> = withContext(Dispatchers.Default) {
        val faceModels = faceModelDao.getAllActiveFaceModels()
        if (faceModels.isEmpty()) {
            return@withContext emptyList()
        }
        val tags = mutableListOf<PhotoFaceTagEntity>()
        for (detectedFace in detectedFaces) {
            val faceEmbedding = faceNetModel.generateEmbedding(detectedFace.croppedBitmap)
            var bestMatch: Pair<String, Float>? = null
            // Seeding with the threshold means only strictly-better matches win.
            var highestSimilarity = threshold
            for (faceModel in faceModels) {
                val modelEmbedding = faceModel.getEmbeddingArray()
                val similarity = faceNetModel.calculateSimilarity(faceEmbedding, modelEmbedding)
                if (similarity > highestSimilarity) {
                    highestSimilarity = similarity
                    bestMatch = Pair(faceModel.id, similarity)
                }
            }
            if (bestMatch != null) {
                val (faceModelId, confidence) = bestMatch
                val tag = PhotoFaceTagEntity.create(
                    imageId = imageId,
                    faceModelId = faceModelId,
                    boundingBox = detectedFace.boundingBox,
                    confidence = confidence,
                    faceEmbedding = faceEmbedding
                )
                tags.add(tag)
                faceModelDao.updateLastUsed(faceModelId, System.currentTimeMillis())
            }
        }
        if (tags.isNotEmpty()) {
            photoFaceTagDao.insertTags(tags)
        }
        tags
    }

    /**
     * Recognize a single face bitmap without persisting anything.
     *
     * @return (faceModelId, similarity) of the best match above [threshold], or null
     */
    suspend fun recognizeFace(
        faceBitmap: Bitmap,
        threshold: Float = FaceNetModel.SIMILARITY_THRESHOLD_HIGH
    ): Pair<String, Float>? = withContext(Dispatchers.Default) {
        val faceEmbedding = faceNetModel.generateEmbedding(faceBitmap)
        val faceModels = faceModelDao.getAllActiveFaceModels()
        val modelEmbeddings = faceModels.map { it.id to it.getEmbeddingArray() }
        faceNetModel.findBestMatch(faceEmbedding, modelEmbeddings, threshold)
    }

    // ======================
    // SEARCH / QUERY
    // ======================

    /**
     * Get all images containing a specific person (empty when the person has
     * no trained face model).
     */
    suspend fun getImagesForPerson(personId: String): List<ImageEntity> = withContext(Dispatchers.IO) {
        val faceModel = faceModelDao.getFaceModelByPersonId(personId)
            ?: return@withContext emptyList()
        val imageIds = photoFaceTagDao.getImageIdsForFaceModel(faceModel.id)
        imageDao.getImagesByIds(imageIds)
    }

    /**
     * Get images for person as Flow (reactive).
     *
     * Fixed: previously the personId was passed straight to
     * getImageIdsForFaceModelFlow, which expects a faceModelId (compare the
     * suspend variant above). The person's face model is now resolved first;
     * an untrained person yields a single empty list.
     */
    fun getImagesForPersonFlow(personId: String): Flow<List<ImageEntity>> = flow {
        val faceModel = faceModelDao.getFaceModelByPersonId(personId)
        if (faceModel == null) {
            emit(emptyList())
        } else {
            emitAll(
                photoFaceTagDao.getImageIdsForFaceModelFlow(faceModel.id)
                    .map { imageIds -> imageDao.getImagesByIds(imageIds) }
            )
        }
    }

    /**
     * Get all persons that currently have an active face model.
     */
    suspend fun getPersonsWithFaceModels(): List<PersonEntity> = withContext(Dispatchers.IO) {
        val faceModels = faceModelDao.getAllActiveFaceModels()
        val personIds = faceModels.map { it.personId }
        personDao.getPersonsByIds(personIds)
    }

    /**
     * Aggregate face-detection stats for a person, or null when the person or
     * their face model does not exist.
     */
    suspend fun getPersonFaceStats(personId: String): PersonFaceStats? = withContext(Dispatchers.IO) {
        val person = personDao.getPersonById(personId) ?: return@withContext null
        val faceModel = faceModelDao.getFaceModelByPersonId(personId) ?: return@withContext null
        val imageIds = photoFaceTagDao.getImageIdsForFaceModel(faceModel.id)
        val allTags = photoFaceTagDao.getAllTagsForFaceModel(faceModel.id)
        val avgConfidence = if (allTags.isNotEmpty()) {
            allTags.map { it.confidence }.average().toFloat()
        } else {
            0f
        }
        val lastDetected = allTags.maxOfOrNull { it.detectedAt }
        PersonFaceStats(
            personId = person.id,
            personName = person.name,
            faceModelId = faceModel.id,
            trainingImageCount = faceModel.trainingImageCount,
            taggedPhotoCount = imageIds.size,
            averageConfidence = avgConfidence,
            lastDetectedAt = lastDetected
        )
    }

    /** Get face tags for an image. */
    suspend fun getFaceTagsForImage(imageId: String): List<PhotoFaceTagEntity> {
        return photoFaceTagDao.getTagsForImage(imageId)
    }

    /** Resolve the person behind a face tag, or null if model/person is gone. */
    suspend fun getPersonForFaceTag(tag: PhotoFaceTagEntity): PersonEntity? = withContext(Dispatchers.IO) {
        val faceModel = faceModelDao.getFaceModelById(tag.faceModelId) ?: return@withContext null
        personDao.getPersonById(faceModel.personId)
    }

    /** Face tags for an image paired with their persons (unresolvable tags dropped). */
    suspend fun getFaceTagsWithPersons(imageId: String): List<Pair<PhotoFaceTagEntity, PersonEntity>> = withContext(Dispatchers.IO) {
        val tags = photoFaceTagDao.getTagsForImage(imageId)
        tags.mapNotNull { tag ->
            val person = getPersonForFaceTag(tag)
            if (person != null) tag to person else null
        }
    }

    // ======================
    // VERIFICATION / QUALITY
    // ======================

    /** Mark a tag as confirmed by the user. */
    suspend fun verifyFaceTag(tagId: String) {
        photoFaceTagDao.markTagAsVerified(tagId)
    }

    suspend fun getUnverifiedTags(): List<PhotoFaceTagEntity> {
        return photoFaceTagDao.getUnverifiedTags()
    }

    suspend fun getLowConfidenceTags(threshold: Float = 0.7f): List<PhotoFaceTagEntity> {
        return photoFaceTagDao.getLowConfidenceTags(threshold)
    }

    // ======================
    // MANAGEMENT
    // ======================

    /** Delete a face model and every tag that references it. */
    suspend fun deleteFaceModel(faceModelId: String) = withContext(Dispatchers.IO) {
        photoFaceTagDao.deleteTagsForFaceModel(faceModelId)
        faceModelDao.deleteFaceModelById(faceModelId)
    }

    suspend fun deleteTagsForImage(imageId: String) {
        photoFaceTagDao.deleteTagsForImage(imageId)
    }

    /**
     * Release the TFLite interpreter.
     * NOTE(review): the lazy faceNetModel is not re-created after close, so
     * the repository must not be used again afterwards.
     */
    fun cleanup() {
        faceNetModel.close()
    }
}
/**
 * A face found by the detector: the cropped face pixels plus the crop's
 * location within the source image.
 */
data class DetectedFace(
val croppedBitmap: Bitmap,
val boundingBox: android.graphics.Rect
)
/**
 * Read-only summary of a person's face-recognition state, built by
 * FaceRecognitionRepository.getPersonFaceStats.
 */
data class PersonFaceStats(
val personId: String,
val personName: String,
val faceModelId: String,
// Number of crops used to train the current model.
val trainingImageCount: Int,
// Distinct images this person has been tagged in.
val taggedPhotoCount: Int,
// Mean confidence across all of the person's tags (0 when untagged).
val averageConfidence: Float,
// Timestamp of the most recent detection, or null if never detected.
val lastDetectedAt: Long?
)

View File

@@ -0,0 +1,108 @@
package com.placeholder.sherpai2.di
import android.content.Context
import androidx.room.Room
import com.placeholder.sherpai2.data.local.AppDatabase
import com.placeholder.sherpai2.data.local.dao.*
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.android.qualifiers.ApplicationContext
import dagger.hilt.components.SingletonComponent
import javax.inject.Singleton
/**
 * DatabaseModule - Hilt bindings for the Room database and all DAOs.
 *
 * Development configuration: fallbackToDestructiveMigration() wipes and
 * recreates the database whenever the schema changes, so no migrations are
 * needed — at the cost of users losing local data on schema bumps. For
 * production, remove it and register explicit .addMigrations(...) steps.
 */
@Module
@InstallIn(SingletonComponent::class)
object DatabaseModule {

    /** Single app-wide Room database instance, stored as "sherpai.db". */
    @Provides
    @Singleton
    fun provideDatabase(@ApplicationContext context: Context): AppDatabase =
        Room.databaseBuilder(context, AppDatabase::class.java, "sherpai.db")
            .fallbackToDestructiveMigration() // dev-only: drops data on schema change
            .build()

    // ----- Core media / tagging DAOs -----

    @Provides
    fun provideImageDao(database: AppDatabase): ImageDao = database.imageDao()

    @Provides
    fun provideTagDao(database: AppDatabase): TagDao = database.tagDao()

    @Provides
    fun provideEventDao(database: AppDatabase): EventDao = database.eventDao()

    @Provides
    fun provideImageTagDao(database: AppDatabase): ImageTagDao = database.imageTagDao()

    @Provides
    fun provideImagePersonDao(database: AppDatabase): ImagePersonDao = database.imagePersonDao()

    @Provides
    fun provideImageEventDao(database: AppDatabase): ImageEventDao = database.imageEventDao()

    @Provides
    fun provideImageAggregateDao(database: AppDatabase): ImageAggregateDao = database.imageAggregateDao()

    // ----- Face recognition DAOs -----

    @Provides
    fun providePersonDao(database: AppDatabase): PersonDao = database.personDao()

    @Provides
    fun provideFaceModelDao(database: AppDatabase): FaceModelDao = database.faceModelDao()

    @Provides
    fun providePhotoFaceTagDao(database: AppDatabase): PhotoFaceTagDao = database.photoFaceTagDao()
}
/**
* NOTES:
*
* fallbackToDestructiveMigration():
* - Deletes database if schema changes
* - Creates fresh database with new schema
* - Perfect for development
* - ⚠️ Users lose data on updates
*
* For production later:
* - Remove fallbackToDestructiveMigration()
* - Add .addMigrations(MIGRATION_1_2, MIGRATION_2_3, ...)
* - This preserves user data
*/

View File

@@ -0,0 +1,34 @@
package com.placeholder.sherpai2.di
import android.content.Context
import com.placeholder.sherpai2.ml.FaceNetModel
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.android.qualifiers.ApplicationContext
import dagger.hilt.components.SingletonComponent
import javax.inject.Singleton
/**
 * MLModule - Hilt bindings for on-device ML components.
 *
 * Exposes one app-wide [FaceNetModel], the MobileFaceNet TFLite wrapper
 * used to generate face embeddings for recognition.
 */
@Module
@InstallIn(SingletonComponent::class)
object MLModule {

    /** Application-scoped FaceNetModel; the TFLite model is loaded once. */
    @Provides
    @Singleton
    fun provideFaceNetModel(@ApplicationContext context: Context): FaceNetModel =
        FaceNetModel(context)
}

View File

@@ -0,0 +1,90 @@
package com.placeholder.sherpai2.di
import android.content.Context
import com.placeholder.sherpai2.data.local.dao.FaceModelDao
import com.placeholder.sherpai2.data.local.dao.ImageDao
import com.placeholder.sherpai2.data.local.dao.PersonDao
import com.placeholder.sherpai2.data.local.dao.PhotoFaceTagDao
import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository
import com.placeholder.sherpai2.data.repository.TaggingRepositoryImpl
import com.placeholder.sherpai2.domain.repository.ImageRepository
import com.placeholder.sherpai2.domain.repository.ImageRepositoryImpl
import com.placeholder.sherpai2.domain.repository.TaggingRepository
import dagger.Binds
import dagger.Module
import dagger.Provides
import dagger.hilt.InstallIn
import dagger.hilt.android.qualifiers.ApplicationContext
import dagger.hilt.components.SingletonComponent
import javax.inject.Singleton
/**
 * RepositoryModule - Provides repository implementations.
 *
 * Contains:
 * - @Binds entries mapping repository interfaces to their implementations
 * - a @Provides factory for FaceRecognitionRepository
 */
@Module
@InstallIn(SingletonComponent::class)
abstract class RepositoryModule {
// ===== EXISTING REPOSITORY BINDINGS =====
@Binds
@Singleton
abstract fun bindImageRepository(
impl: ImageRepositoryImpl
): ImageRepository
@Binds
@Singleton
abstract fun bindTaggingRepository(
impl: TaggingRepositoryImpl
): TaggingRepository
// ===== COMPANION OBJECT FOR PROVIDES =====
companion object {
/**
 * Provide FaceRecognitionRepository.
 *
 * Uses @Provides instead of @Binds because the constructor needs the
 * @ApplicationContext-qualified Context plus several DAO dependencies.
 *
 * NOTE(review): FaceRecognitionRepository also declares @Singleton with an
 * @Inject constructor whose Context parameter carries no qualifier — this
 * explicit binding is what actually supplies the application Context.
 * Consider dropping the class-side @Inject constructor (or qualifying its
 * Context) so there is a single binding path.
 *
 * USAGE IN VIEWMODEL:
 * ```
 * @HiltViewModel
 * class MyViewModel @Inject constructor(
 *     private val faceRecognitionRepository: FaceRecognitionRepository
 * ) : ViewModel() {
 *     // Use repository methods
 * }
 * ```
 */
@Provides
@Singleton
fun provideFaceRecognitionRepository(
@ApplicationContext context: Context,
personDao: PersonDao,
imageDao: ImageDao,
faceModelDao: FaceModelDao,
photoFaceTagDao: PhotoFaceTagDao
): FaceRecognitionRepository {
return FaceRecognitionRepository(
context = context,
personDao = personDao,
imageDao = imageDao,
faceModelDao = faceModelDao,
photoFaceTagDao = photoFaceTagDao
)
}
}
}

View File

@@ -0,0 +1,33 @@
package com.placeholder.sherpai2.domain.repository
import com.placeholder.sherpai2.data.local.model.ImageWithEverything
import kotlinx.coroutines.flow.Flow
/**
 * Canonical access point for images.
 *
 * ViewModels must NEVER talk directly to DAOs.
 */
interface ImageRepository {
/**
 * Observe a fully-hydrated image graph (image + tag/person/event links).
 *
 * Used by detail screens.
 */
fun observeImage(imageId: String): Flow<ImageWithEverything>
/**
 * Ingest images discovered on device.
 *
 * This function:
 * - deduplicates
 * - assigns events automatically
 *
 * NOTE(review): the current implementation hashes and inserts images but
 * does not yet assign events — align the contract or the implementation.
 */
suspend fun ingestImages()
// Reactive stream of every ingested image with its relations.
fun getAllImages(): Flow<List<ImageWithEverything>>
// Images carrying the given tag value.
fun findImagesByTag(tag: String): Flow<List<ImageWithEverything>>
// Most recently captured images, capped at [limit].
fun getRecentImages(limit: Int): Flow<List<ImageWithEverything>>
}

View File

@@ -0,0 +1,147 @@
package com.placeholder.sherpai2.domain.repository
import android.content.ContentUris
import android.content.Context
import android.net.Uri
import android.provider.MediaStore
import android.util.Log
import com.placeholder.sherpai2.data.local.dao.EventDao
import com.placeholder.sherpai2.data.local.dao.ImageAggregateDao
import com.placeholder.sherpai2.data.local.dao.ImageDao
import com.placeholder.sherpai2.data.local.dao.ImageEventDao
import com.placeholder.sherpai2.data.local.entity.ImageEntity
import com.placeholder.sherpai2.data.local.model.ImageWithEverything
import dagger.hilt.android.qualifiers.ApplicationContext
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.withContext
import java.security.MessageDigest
import java.util.*
import javax.inject.Inject
import javax.inject.Singleton
@Singleton
class ImageRepositoryImpl @Inject constructor(
private val imageDao: ImageDao,
private val eventDao: EventDao,
private val imageEventDao: ImageEventDao,
private val aggregateDao: ImageAggregateDao,
@ApplicationContext private val context: Context
) : ImageRepository {
// Detail-screen stream: one image plus all of its join rows.
override fun observeImage(imageId: String): Flow<ImageWithEverything> {
return aggregateDao.observeImageWithEverything(imageId)
}
/**
 * Ingest all images from MediaStore.
 * Uses _ID and DATE_ADDED to ensure no image is skipped, even if DATE_TAKEN is identical.
 *
 * NOTE(review): every run builds fresh UUID rows; whether re-ingesting the
 * same photos duplicates them depends on the DAO's conflict strategy against
 * the unique sha256/imageUri indexes — confirm insertImages uses IGNORE.
 * NOTE(review): source is hard-coded to "CAMERA"; screenshots and imports
 * are not distinguished yet.
 * Errors are logged and swallowed so a failed ingest never crashes callers.
 */
override suspend fun ingestImages(): Unit = withContext(Dispatchers.IO) {
try {
val imageList = mutableListOf<ImageEntity>()
// Only the columns needed to build an ImageEntity without decoding pixels.
val projection = arrayOf(
MediaStore.Images.Media._ID,
MediaStore.Images.Media.DISPLAY_NAME,
MediaStore.Images.Media.DATE_TAKEN,
MediaStore.Images.Media.DATE_ADDED,
MediaStore.Images.Media.WIDTH,
MediaStore.Images.Media.HEIGHT
)
val sortOrder = "${MediaStore.Images.Media.DATE_ADDED} ASC"
context.contentResolver.query(
MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
projection,
null,
null,
sortOrder
)?.use { cursor ->
val idCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media._ID)
val nameCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DISPLAY_NAME)
val dateTakenCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATE_TAKEN)
val dateAddedCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATE_ADDED)
val widthCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.WIDTH)
val heightCol = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.HEIGHT)
while (cursor.moveToNext()) {
val id = cursor.getLong(idCol)
val displayName = cursor.getString(nameCol)
val dateTaken = cursor.getLong(dateTakenCol)
val dateAdded = cursor.getLong(dateAddedCol)
val width = cursor.getInt(widthCol)
val height = cursor.getInt(heightCol)
val contentUri: Uri = ContentUris.withAppendedId(
MediaStore.Images.Media.EXTERNAL_CONTENT_URI, id
)
// Hash the bytes for dedup; unreadable files are skipped, not fatal.
val sha256 = computeSHA256(contentUri)
if (sha256 == null) {
Log.w("ImageRepository", "Skipped image: $displayName (cannot read bytes)")
continue
}
val imageEntity = ImageEntity(
imageId = UUID.randomUUID().toString(),
imageUri = contentUri.toString(),
sha256 = sha256,
// DATE_TAKEN is millis; DATE_ADDED is seconds, hence the * 1000 fallback.
capturedAt = if (dateTaken > 0) dateTaken else dateAdded * 1000,
ingestedAt = System.currentTimeMillis(),
width = width,
height = height,
source = "CAMERA" // or SCREENSHOT / IMPORTED
)
imageList += imageEntity
Log.i("ImageRepository", "Processing image: $displayName, SHA256: $sha256")
}
}
if (imageList.isNotEmpty()) {
imageDao.insertImages(imageList)
Log.i("ImageRepository", "Ingested ${imageList.size} images")
} else {
Log.i("ImageRepository", "No images found on device")
}
} catch (e: Exception) {
Log.e("ImageRepository", "Error ingesting images", e)
}
}
/**
 * Compute SHA256 from a MediaStore Uri safely.
 *
 * @return lowercase hex digest, or null when the stream cannot be opened/read.
 */
private fun computeSHA256(uri: Uri): String? {
return try {
val digest = MessageDigest.getInstance("SHA-256")
context.contentResolver.openInputStream(uri)?.use { input ->
val buffer = ByteArray(8192)
var read: Int
// read() returns -1 at EOF; > 0 is equivalent here since the buffer is non-empty.
while (input.read(buffer).also { read = it } > 0) {
digest.update(buffer, 0, read)
}
} ?: return null
digest.digest().joinToString("") { "%02x".format(it) }
} catch (e: Exception) {
Log.e("ImageRepository", "Failed SHA256 for $uri", e)
null
}
}
// Reactive list of all images with their join rows.
override fun getAllImages(): Flow<List<ImageWithEverything>> {
return aggregateDao.observeAllImagesWithEverything()
}
// Images carrying the given tag value (resolution happens in the DAO query).
override fun findImagesByTag(tag: String): Flow<List<ImageWithEverything>> {
return aggregateDao.observeImagesWithTag(tag)
}
// Most recent images, capped at [limit].
override fun getRecentImages(limit: Int): Flow<List<ImageWithEverything>> {
return imageDao.getRecentImages(limit)
}
}

View File

@@ -0,0 +1,30 @@
package com.placeholder.sherpai2.domain.repository
import com.placeholder.sherpai2.data.local.entity.TagEntity
import kotlinx.coroutines.flow.Flow
/**
 * Handles all tagging operations.
 *
 * This repository is the ONLY place where:
 * - tags are attached
 * - visibility rules are applied
 */
interface TaggingRepository {
// Attach a tag (creating the tag row if needed).
// source: AUTO | MANUAL; confidence: 0–1 (manual tags use 1.0).
suspend fun addTagToImage(
imageId: String,
tagValue: String,
source: String,
confidence: Float
)
// Soft-hide a tag on an image without deleting history.
suspend fun hideTagForImage(
imageId: String,
tagValue: String
)
// Reactive stream of the resolved tags visible for an image.
fun getTagsForImage(imageId: String): Flow<List<TagEntity>>
// Soft removal by tag id (implemented as a HIDDEN visibility upsert).
suspend fun removeTagFromImage(imageId: String, tagId: String)
}

View File

@@ -0,0 +1,97 @@
package com.placeholder.sherpai2.data.repository
import com.placeholder.sherpai2.data.local.dao.ImageTagDao
import com.placeholder.sherpai2.data.local.dao.TagDao
import com.placeholder.sherpai2.data.local.entity.ImageTagEntity
import com.placeholder.sherpai2.data.local.entity.TagEntity
import com.placeholder.sherpai2.domain.repository.TaggingRepository
import kotlinx.coroutines.flow.Flow
import javax.inject.Inject
import javax.inject.Singleton
/**
 * Single write-path for tagging.
 *
 * Critical design decisions:
 * - Tag normalization happens exactly once, in [normalize], and is applied
 *   to every lookup and every insert, so "Park " and "park" resolve to one row.
 * - Visibility rules live here: removal is a soft-delete via a HIDDEN upsert.
 * - ML and manual tagging share the same path.
 */
@Singleton
class TaggingRepositoryImpl @Inject constructor(
    private val tagDao: TagDao,
    private val imageTagDao: ImageTagDao
) : TaggingRepository {

    /** Canonical tag form: trimmed and lowercased. */
    private fun normalize(tagValue: String): String = tagValue.trim().lowercase()

    /** Upsert a HIDDEN link row for (imageId, tagId) — soft delete, keeps history. */
    private suspend fun hideLink(imageId: String, tagId: String) {
        imageTagDao.upsert(
            ImageTagEntity(
                imageId = imageId,
                tagId = tagId,
                source = "MANUAL",
                confidence = 1.0f,
                visibility = "HIDDEN",
                createdAt = System.currentTimeMillis()
            )
        )
    }

    override suspend fun addTagToImage(
        imageId: String,
        tagValue: String,
        source: String,
        confidence: Float
    ) {
        // Step 1: normalize tag
        val normalized = normalize(tagValue)
        // Step 2: ensure tag exists (deterministic id, so a concurrent insert
        // ignored by the DAO still leaves this object pointing at the right row)
        val tag = tagDao.getByValue(normalized)
            ?: TagEntity(
                tagId = "tag_$normalized",
                type = "GENERIC",
                value = normalized,
                createdAt = System.currentTimeMillis()
            ).also { tagDao.insert(it) }
        // Step 3: attach tag to image
        imageTagDao.upsert(
            ImageTagEntity(
                imageId = imageId,
                tagId = tag.tagId,
                source = source,
                confidence = confidence,
                visibility = "PUBLIC",
                createdAt = System.currentTimeMillis()
            )
        )
    }

    override suspend fun hideTagForImage(
        imageId: String,
        tagValue: String
    ) {
        // Fixed: look up by the same normalized form used when the tag was
        // added; previously a raw value like "Park" silently failed to match
        // the stored "park" and the hide was a no-op.
        val tag = tagDao.getByValue(normalize(tagValue)) ?: return
        hideLink(imageId, tag.tagId)
    }

    override fun getTagsForImage(imageId: String): Flow<List<TagEntity>> {
        // Join imageTagDao -> tagDao to get all PUBLIC tags for this image
        return imageTagDao.getTagsForImage(imageId)
    }

    override suspend fun removeTagFromImage(imageId: String, tagId: String) {
        // Mark the tag as hidden instead of deleting, keeping visibility logic
        hideLink(imageId, tagId)
    }
}

View File

@@ -0,0 +1,204 @@
package com.placeholder.sherpai2.ml
import android.content.Context
import android.graphics.Bitmap
import org.tensorflow.lite.Interpreter
import java.io.FileInputStream
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.nio.MappedByteBuffer
import java.nio.channels.FileChannel
import kotlin.math.sqrt
/**
 * FaceNetModel - MobileFaceNet wrapper for face recognition.
 *
 * - All IDs are Strings (matching the Room schema)
 * - Generates 192-dimensional, L2-normalized embeddings
 * - Cosine similarity for matching
 */
class FaceNetModel(private val context: Context) {
    companion object {
        private const val MODEL_FILE = "mobilefacenet.tflite"
        private const val INPUT_SIZE = 112      // model expects 112x112 RGB input
        private const val EMBEDDING_SIZE = 192  // output vector length
        const val SIMILARITY_THRESHOLD_HIGH = 0.7f
        const val SIMILARITY_THRESHOLD_MEDIUM = 0.6f
        const val SIMILARITY_THRESHOLD_LOW = 0.5f
    }

    private var interpreter: Interpreter? = null

    init {
        try {
            interpreter = Interpreter(loadModelFile())
        } catch (e: Exception) {
            throw RuntimeException("Failed to load FaceNet model", e)
        }
    }

    /**
     * Memory-map the TFLite model from assets.
     *
     * BUG FIX: the original leaked both the AssetFileDescriptor and the
     * FileInputStream (never closed). Both are now released via use {};
     * the returned MappedByteBuffer stays valid after the channel closes.
     */
    private fun loadModelFile(): MappedByteBuffer {
        context.assets.openFd(MODEL_FILE).use { afd ->
            FileInputStream(afd.fileDescriptor).use { stream ->
                return stream.channel.map(
                    FileChannel.MapMode.READ_ONLY,
                    afd.startOffset,
                    afd.declaredLength
                )
            }
        }
    }

    /**
     * Generate the embedding for a single cropped face.
     *
     * @param faceBitmap cropped face image (resized to 112x112 internally)
     * @return 192-dimensional unit-length embedding
     * @throws IllegalStateException if called after [close]
     */
    fun generateEmbedding(faceBitmap: Bitmap): FloatArray {
        // BUG FIX: previously a null interpreter (use-after-close) silently
        // produced an all-zero embedding via interpreter?.run; fail loudly instead.
        val tflite = checkNotNull(interpreter) { "FaceNetModel used after close()" }
        val resized = Bitmap.createScaledBitmap(faceBitmap, INPUT_SIZE, INPUT_SIZE, true)
        val inputBuffer = preprocessImage(resized)
        val output = Array(1) { FloatArray(EMBEDDING_SIZE) }
        tflite.run(inputBuffer, output)
        return normalizeEmbedding(output[0])
    }

    /**
     * Generate embeddings for multiple faces sequentially.
     *
     * @param onProgress invoked as (processedCount, totalCount) before each face
     */
    fun generateEmbeddingsBatch(
        faceBitmaps: List<Bitmap>,
        onProgress: (Int, Int) -> Unit = { _, _ -> }
    ): List<FloatArray> {
        return faceBitmaps.mapIndexed { index, bitmap ->
            onProgress(index + 1, faceBitmaps.size)
            generateEmbedding(bitmap)
        }
    }

    /**
     * Create a person model by averaging multiple embeddings, then re-normalizing.
     *
     * @throws IllegalArgumentException if [embeddings] is empty
     */
    fun createPersonModel(embeddings: List<FloatArray>): FloatArray {
        require(embeddings.isNotEmpty()) { "Need at least one embedding" }
        val averaged = FloatArray(EMBEDDING_SIZE) { 0f }
        embeddings.forEach { embedding ->
            for (i in embedding.indices) {
                averaged[i] += embedding[i]
            }
        }
        val count = embeddings.size.toFloat()
        for (i in averaged.indices) {
            averaged[i] /= count
        }
        return normalizeEmbedding(averaged)
    }

    /**
     * Cosine similarity between two embeddings.
     *
     * @return value in [-1, 1]; higher = more similar; 0 if either vector is zero
     * @throws IllegalArgumentException if either array is not EMBEDDING_SIZE long
     */
    fun calculateSimilarity(embedding1: FloatArray, embedding2: FloatArray): Float {
        require(embedding1.size == EMBEDDING_SIZE && embedding2.size == EMBEDDING_SIZE) {
            "Invalid embedding size"
        }
        var dotProduct = 0f
        var norm1 = 0f
        var norm2 = 0f
        for (i in embedding1.indices) {
            dotProduct += embedding1[i] * embedding2[i]
            norm1 += embedding1[i] * embedding1[i]
            norm2 += embedding2[i] * embedding2[i]
        }
        val denom = sqrt(norm1) * sqrt(norm2)
        // BUG FIX: guard the zero-vector case, which previously returned NaN.
        return if (denom > 0f) dotProduct / denom else 0f
    }

    /**
     * Find the best matching face model from a list.
     *
     * @param faceEmbedding   embedding to match
     * @param modelEmbeddings list of (modelId, embedding) candidates
     * @param threshold       minimum similarity (strictly greater than) to count as a match
     * @return (modelId, confidence) of the best candidate, or null if none clears [threshold]
     */
    fun findBestMatch(
        faceEmbedding: FloatArray,
        modelEmbeddings: List<Pair<String, FloatArray>>,
        threshold: Float = SIMILARITY_THRESHOLD_HIGH
    ): Pair<String, Float>? {
        var bestMatch: Pair<String, Float>? = null
        var highestSimilarity = threshold
        for ((modelId, modelEmbedding) in modelEmbeddings) {
            val similarity = calculateSimilarity(faceEmbedding, modelEmbedding)
            if (similarity > highestSimilarity) {
                highestSimilarity = similarity
                bestMatch = Pair(modelId, similarity)
            }
        }
        return bestMatch
    }

    /**
     * Pack a 112x112 bitmap into the model's float32 input buffer,
     * scaling each channel from [0, 255] to [-1, 1].
     */
    private fun preprocessImage(bitmap: Bitmap): ByteBuffer {
        val buffer = ByteBuffer.allocateDirect(4 * INPUT_SIZE * INPUT_SIZE * 3)
        buffer.order(ByteOrder.nativeOrder())
        val pixels = IntArray(INPUT_SIZE * INPUT_SIZE)
        bitmap.getPixels(pixels, 0, INPUT_SIZE, 0, 0, INPUT_SIZE, INPUT_SIZE)
        for (pixel in pixels) {
            val r = ((pixel shr 16) and 0xFF) / 255.0f
            val g = ((pixel shr 8) and 0xFF) / 255.0f
            val b = (pixel and 0xFF) / 255.0f
            buffer.putFloat((r - 0.5f) / 0.5f)
            buffer.putFloat((g - 0.5f) / 0.5f)
            buffer.putFloat((b - 0.5f) / 0.5f)
        }
        return buffer
    }

    /**
     * Scale an embedding to unit length; a zero vector is returned unchanged.
     */
    private fun normalizeEmbedding(embedding: FloatArray): FloatArray {
        var norm = 0f
        for (value in embedding) {
            norm += value * value
        }
        norm = sqrt(norm)
        return if (norm > 0) {
            FloatArray(embedding.size) { i -> embedding[i] / norm }
        } else {
            embedding
        }
    }

    /**
     * Release the TFLite interpreter. After this call, [generateEmbedding] throws.
     */
    fun close() {
        interpreter?.close()
        interpreter = null
    }
}

View File

@@ -1,36 +0,0 @@
// In navigation/AppDestinations.kt
package com.placeholder.sherpai2.navigation
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.ui.graphics.vector.ImageVector
/**
 * All navigation destinations (screens) of the application, each carrying its
 * route string plus the icon and label rendered in the drawer.
 */
sealed class AppDestinations(val route: String, val icon: ImageVector, val label: String) {
    // --- Core functional sections ---
    object Search : AppDestinations("search", Icons.Default.Search, "Search")
    object Models : AppDestinations("models", Icons.Default.Layers, "Models")
    object Inventory : AppDestinations("inv", Icons.Default.Inventory2, "Inv")
    object Train : AppDestinations("train", Icons.Default.TrackChanges, "Train")
    object Tags : AppDestinations("tags", Icons.Default.LocalOffer, "Tags")

    // --- Utility / secondary sections ---
    object Upload : AppDestinations("upload", Icons.Default.CloudUpload, "Upload")
    object Settings : AppDestinations("settings", Icons.Default.Settings, "Settings")
}

/** Ordered entries for the drawer's primary section. */
val mainDrawerItems: List<AppDestinations> = listOf(
    AppDestinations.Search,
    AppDestinations.Models,
    AppDestinations.Inventory,
    AppDestinations.Train,
    AppDestinations.Tags,
)

/** Ordered entries for the drawer's secondary (utility) section. */
val utilityDrawerItems: List<AppDestinations> = listOf(
    AppDestinations.Upload,
    AppDestinations.Settings,
)

View File

@@ -1,57 +0,0 @@
// In presentation/MainScreen.kt
package com.placeholder.sherpai2.presentation
import androidx.compose.foundation.layout.padding
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Menu
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Modifier
import com.placeholder.sherpai2.navigation.AppDestinations
import kotlinx.coroutines.launch
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun MainScreen() {
    val drawerState = rememberDrawerState(initialValue = DrawerValue.Closed)
    val scope = rememberCoroutineScope()

    // Currently selected destination; drives both the drawer highlight
    // and the content area below.
    var currentScreen: AppDestinations by remember { mutableStateOf(AppDestinations.Search) }

    // Left sidebar (modal drawer) wrapping the main scaffold.
    ModalNavigationDrawer(
        drawerState = drawerState,
        drawerContent = {
            AppDrawerContent(
                currentScreen = currentScreen,
                onDestinationClicked = { destination ->
                    currentScreen = destination
                    // Dismiss the drawer once a destination has been chosen.
                    scope.launch { drawerState.close() }
                }
            )
        },
    ) {
        Scaffold(
            topBar = {
                TopAppBar(
                    title = { Text(currentScreen.label) },
                    navigationIcon = {
                        // Hamburger button that slides the drawer open.
                        IconButton(onClick = { scope.launch { drawerState.open() } }) {
                            Icon(Icons.Filled.Menu, contentDescription = "Open Drawer")
                        }
                    }
                )
            }
        ) { paddingValues ->
            // Body content for whichever destination is selected.
            MainContentArea(
                currentScreen = currentScreen,
                modifier = Modifier.padding(paddingValues)
            )
        }
    }
}

View File

@@ -1,59 +0,0 @@
// In presentation/AppDrawerContent.kt
package com.placeholder.sherpai2.presentation
import androidx.compose.foundation.layout.*
import androidx.compose.material3.*
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import androidx.compose.ui.unit.dp
import com.placeholder.sherpai2.navigation.AppDestinations
import com.placeholder.sherpai2.navigation.mainDrawerItems
import com.placeholder.sherpai2.navigation.utilityDrawerItems
/**
 * Sliding drawer panel: header, main navigation section, then utility section.
 *
 * @param currentScreen        destination to highlight as selected
 * @param onDestinationClicked invoked with the tapped destination
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun AppDrawerContent(
    currentScreen: AppDestinations,
    onDestinationClicked: (AppDestinations) -> Unit
) {
    // Defines the width and content of the sliding drawer panel
    ModalDrawerSheet(modifier = Modifier.width(280.dp)) {
        // Header/Logo area
        Text(
            "SherpAI Control Panel",
            style = MaterialTheme.typography.headlineSmall,
            modifier = Modifier.padding(16.dp)
        )
        Divider(Modifier.fillMaxWidth())
        // 1. Main navigation items
        DrawerSection(mainDrawerItems, currentScreen, onDestinationClicked)
        // Separator
        Divider(Modifier.fillMaxWidth().padding(vertical = 8.dp))
        // 2. Utility items
        DrawerSection(utilityDrawerItems, currentScreen, onDestinationClicked)
    }
}

/**
 * Renders one group of drawer destinations.
 * Extracted from the two previously copy-pasted item loops so both sections
 * stay in sync if the item styling changes.
 */
@Composable
private fun DrawerSection(
    items: List<AppDestinations>,
    currentScreen: AppDestinations,
    onDestinationClicked: (AppDestinations) -> Unit
) {
    Column(modifier = Modifier.padding(vertical = 8.dp)) {
        items.forEach { destination ->
            NavigationDrawerItem(
                label = { Text(destination.label) },
                icon = { Icon(destination.icon, contentDescription = destination.label) },
                selected = destination == currentScreen,
                onClick = { onDestinationClicked(destination) },
                modifier = Modifier.padding(NavigationDrawerItemDefaults.ItemPadding)
            )
        }
    }
}

View File

@@ -1,42 +0,0 @@
// In presentation/MainContentArea.kt
package com.placeholder.sherpai2.presentation
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.*
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.unit.dp
import com.placeholder.sherpai2.navigation.AppDestinations
/**
 * Swaps the displayed placeholder content based on the destination
 * selected in the drawer.
 */
@Composable
fun MainContentArea(currentScreen: AppDestinations, modifier: Modifier = Modifier) {
    // Resolve the placeholder text for the active destination first, then render it.
    val message = when (currentScreen) {
        AppDestinations.Search -> "Search Screen: Find your models and data."
        AppDestinations.Models -> "Models Screen: Manage your LoRA/embeddings."
        AppDestinations.Inventory -> "Inventory Screen: View all collected data."
        AppDestinations.Train -> "Train Screen: Start the LoRA adaptation process."
        AppDestinations.Tags -> "Tags Screen: Create and edit custom tags."
        AppDestinations.Upload -> "Upload Screen: Import new photos/data."
        AppDestinations.Settings -> "Settings Screen: Configure app behavior."
    }
    Box(
        modifier = modifier
            .fillMaxSize()
            .background(MaterialTheme.colorScheme.surfaceVariant),
        contentAlignment = Alignment.Center
    ) {
        SimplePlaceholder(message)
    }
}
/** Centered large-title text used as a stand-in for unimplemented screens. */
@Composable
private fun SimplePlaceholder(text: String) {
    Text(
        modifier = Modifier.padding(16.dp),
        style = MaterialTheme.typography.titleLarge,
        text = text
    )
}

View File

@@ -0,0 +1,162 @@
package com.placeholder.sherpai2.ui.devscreens
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.material3.*
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Brush
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.unit.dp
/**
 * Placeholder screen for features under development.
 *
 * Renders, top to bottom: a construction-icon badge, the feature [title],
 * the [subtitle] description, a "Coming Soon" pill, and a "What's planned"
 * checklist card — all centered over a subtle vertical gradient so the
 * styling stays consistent with the rest of the app.
 *
 * @param title    name of the feature this screen stands in for
 * @param subtitle short description; defaults to a generic under-development note
 */
@Composable
fun DummyScreen(
    title: String,
    subtitle: String = "This feature is under development"
) {
    Box(
        modifier = Modifier
            .fillMaxSize()
            .background(
                // Soft top-to-bottom fade from surface into a translucent surfaceVariant.
                Brush.verticalGradient(
                    colors = listOf(
                        MaterialTheme.colorScheme.surface,
                        MaterialTheme.colorScheme.surfaceVariant.copy(alpha = 0.3f)
                    )
                )
            ),
        contentAlignment = Alignment.Center
    ) {
        Column(
            horizontalAlignment = Alignment.CenterHorizontally,
            verticalArrangement = Arrangement.spacedBy(24.dp),
            modifier = Modifier.padding(48.dp)
        ) {
            // Icon badge: rounded elevated square with a construction icon.
            Surface(
                modifier = Modifier.size(96.dp),
                shape = RoundedCornerShape(24.dp),
                color = MaterialTheme.colorScheme.primaryContainer,
                shadowElevation = 8.dp
            ) {
                Box(contentAlignment = Alignment.Center) {
                    Icon(
                        Icons.Default.Construction,
                        contentDescription = null,
                        modifier = Modifier.size(48.dp),
                        tint = MaterialTheme.colorScheme.primary
                    )
                }
            }
            Spacer(modifier = Modifier.height(8.dp))
            // Feature title
            Text(
                text = title,
                style = MaterialTheme.typography.headlineMedium,
                fontWeight = FontWeight.Bold,
                textAlign = TextAlign.Center
            )
            // Description under the title
            Text(
                text = subtitle,
                style = MaterialTheme.typography.bodyLarge,
                color = MaterialTheme.colorScheme.onSurfaceVariant,
                textAlign = TextAlign.Center,
                modifier = Modifier.padding(horizontal = 24.dp)
            )
            Spacer(modifier = Modifier.height(8.dp))
            // "Coming Soon" pill badge (clock icon + label).
            Surface(
                shape = RoundedCornerShape(16.dp),
                color = MaterialTheme.colorScheme.tertiaryContainer,
                shadowElevation = 2.dp
            ) {
                Row(
                    modifier = Modifier.padding(horizontal = 20.dp, vertical = 12.dp),
                    horizontalArrangement = Arrangement.spacedBy(8.dp),
                    verticalAlignment = Alignment.CenterVertically
                ) {
                    Icon(
                        Icons.Default.Schedule,
                        contentDescription = null,
                        modifier = Modifier.size(20.dp),
                        tint = MaterialTheme.colorScheme.onTertiaryContainer
                    )
                    Text(
                        text = "Coming Soon",
                        style = MaterialTheme.typography.labelLarge,
                        fontWeight = FontWeight.SemiBold,
                        color = MaterialTheme.colorScheme.onTertiaryContainer
                    )
                }
            }
            Spacer(modifier = Modifier.height(24.dp))
            // Static "What's planned" preview card with a fixed checklist.
            Card(
                modifier = Modifier.fillMaxWidth(0.8f),
                colors = CardDefaults.cardColors(
                    containerColor = MaterialTheme.colorScheme.surfaceVariant.copy(alpha = 0.5f)
                ),
                shape = RoundedCornerShape(16.dp)
            ) {
                Column(
                    modifier = Modifier.padding(20.dp),
                    verticalArrangement = Arrangement.spacedBy(12.dp)
                ) {
                    Text(
                        text = "What's planned:",
                        style = MaterialTheme.typography.titleSmall,
                        fontWeight = FontWeight.Bold
                    )
                    FeatureItem("Full implementation")
                    FeatureItem("Beautiful UI design")
                    FeatureItem("Smooth animations")
                    FeatureItem("Production-ready code")
                }
            }
        }
    }
}
/** Single checklist row (check icon + label) used by DummyScreen's preview card. */
@Composable
private fun FeatureItem(text: String) {
    Row(
        verticalAlignment = Alignment.CenterVertically,
        horizontalArrangement = Arrangement.spacedBy(8.dp)
    ) {
        Icon(
            imageVector = Icons.Default.CheckCircle,
            contentDescription = null,
            tint = MaterialTheme.colorScheme.primary,
            modifier = Modifier.size(16.dp)
        )
        Text(
            color = MaterialTheme.colorScheme.onSurfaceVariant,
            style = MaterialTheme.typography.bodyMedium,
            text = text
        )
    }
}

View File

@@ -0,0 +1,86 @@
package com.placeholder.sherpai2.ui.imagedetail
import androidx.compose.foundation.layout.*
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Modifier
import androidx.compose.ui.unit.dp
import androidx.lifecycle.compose.collectAsStateWithLifecycle
import coil.compose.AsyncImage
import com.placeholder.sherpai2.ui.imagedetail.viewmodel.ImageDetailViewModel
/**
 * ImageDetailScreen
 *
 * Purpose:
 * - Show the image for [imageUri]
 * - Add / remove tags via [ImageDetailViewModel]
 * - Validate write propagation through the tagging repository
 *
 * @param imageUri image to display and tag (also used as the tagging key)
 * @param onBack   navigation callback supplied by the caller
 */
@Composable
fun ImageDetailScreen(
    modifier: Modifier = Modifier,
    imageUri: String,
    onBack: () -> Unit
) {
    // BUG FIX: ImageDetailViewModel is a @HiltViewModel with an @Inject
    // constructor; the plain androidx viewModel() factory cannot construct it
    // and crashes at runtime. Use the Hilt-aware factory instead (same pattern
    // as PersonInventoryScreen).
    val viewModel: ImageDetailViewModel = androidx.hilt.navigation.compose.hiltViewModel()
    LaunchedEffect(imageUri) {
        viewModel.loadImage(imageUri)
    }
    val tags by viewModel.tags.collectAsStateWithLifecycle()
    var newTag by remember { mutableStateOf("") }
    Column(
        modifier = modifier
            .fillMaxSize()
            .padding(12.dp)
    ) {
        AsyncImage(
            model = imageUri,
            contentDescription = null,
            modifier = Modifier
                .fillMaxWidth()
                .aspectRatio(1f)
        )
        Spacer(modifier = Modifier.height(12.dp))
        OutlinedTextField(
            value = newTag,
            onValueChange = { newTag = it },
            label = { Text("Add tag") },
            modifier = Modifier.fillMaxWidth()
        )
        Button(
            onClick = {
                // Robustness: ignore empty/whitespace-only input instead of
                // creating a blank tag.
                val value = newTag.trim()
                if (value.isNotEmpty()) {
                    viewModel.addTag(value)
                }
                newTag = ""
            },
            modifier = Modifier.padding(top = 8.dp)
        ) {
            Text("Add Tag")
        }
        Spacer(modifier = Modifier.height(16.dp))
        tags.forEach { tag ->
            Row(
                horizontalArrangement = Arrangement.SpaceBetween,
                modifier = Modifier.fillMaxWidth()
            ) {
                Text(tag.value)
                TextButton(onClick = { viewModel.removeTag(tag) }) {
                    Text("Remove")
                }
            }
        }
    }
}

View File

@@ -0,0 +1,57 @@
package com.placeholder.sherpai2.ui.imagedetail.viewmodel
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.placeholder.sherpai2.data.local.entity.TagEntity
import com.placeholder.sherpai2.domain.repository.TaggingRepository
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.ExperimentalCoroutinesApi
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.launch
import javax.inject.Inject
/**
 * ImageDetailViewModel
 *
 * Owns:
 * - The current image context (its URI, used as the tagging key)
 * - Tag read stream and tag write operations
 */
@HiltViewModel
@OptIn(ExperimentalCoroutinesApi::class)
class ImageDetailViewModel @Inject constructor(
    private val tagRepository: TaggingRepository
) : ViewModel() {
    // URI of the image currently shown; null until loadImage() is called.
    private val imageUri = MutableStateFlow<String?>(null)

    /** Live PUBLIC tags for the current image; empty until an image is loaded. */
    val tags: StateFlow<List<TagEntity>> =
        imageUri
            .filterNotNull()
            .flatMapLatest { uri ->
                tagRepository.getTagsForImage(uri)
            }
            .stateIn(
                scope = viewModelScope,
                started = SharingStarted.WhileSubscribed(5_000),
                initialValue = emptyList()
            )

    /** Set the image this ViewModel operates on; switches the [tags] stream. */
    fun loadImage(uri: String) {
        imageUri.value = uri
    }

    /** Add a manual tag to the current image; blank values are ignored. */
    fun addTag(value: String) {
        val uri = imageUri.value ?: return
        // Robustness: the repository would otherwise create an empty "" tag.
        if (value.isBlank()) return
        viewModelScope.launch {
            tagRepository.addTagToImage(uri, value, source = "MANUAL", confidence = 1.0f)
        }
    }

    /** Hide [tag] on the current image (soft delete via visibility). */
    fun removeTag(tag: TagEntity) {
        val uri = imageUri.value ?: return
        viewModelScope.launch {
            // BUG FIX: the repository expects the canonical tagId ("tag_<value>"),
            // not the display value. Passing tag.value upserted a HIDDEN row with
            // a non-existent id, so the tag never actually disappeared.
            tagRepository.removeTagFromImage(uri, tag.tagId)
        }
    }
}

View File

@@ -0,0 +1,614 @@
package com.placeholder.sherpai2.ui.modelinventory
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.vector.ImageVector
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp
import androidx.hilt.navigation.compose.hiltViewModel
import java.text.SimpleDateFormat
import java.util.*
/**
 * PersonInventoryScreen - Manage trained face models.
 *
 * Features:
 * - List all trained persons with their stats
 * - Delete a model (with confirmation dialog)
 * - Scan the photo library for a person (with confirmation dialog + progress overlay)
 *
 * @param viewModel          Hilt-provided state holder for the inventory
 * @param onViewPersonPhotos navigation callback receiving the person's id
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun PersonInventoryScreen(
    modifier: Modifier = Modifier,
    viewModel: PersonInventoryViewModel = hiltViewModel(),
    onViewPersonPhotos: (String) -> Unit = {}
) {
    val uiState by viewModel.uiState.collectAsState()
    val scanningState by viewModel.scanningState.collectAsState()
    // Non-null while the corresponding confirmation dialog is showing.
    var personToDelete by remember { mutableStateOf<PersonInventoryViewModel.PersonWithStats?>(null) }
    var personToScan by remember { mutableStateOf<PersonInventoryViewModel.PersonWithStats?>(null) }
    Scaffold(
        topBar = {
            TopAppBar(
                title = { Text("Trained People") },
                colors = TopAppBarDefaults.topAppBarColors(
                    containerColor = MaterialTheme.colorScheme.primaryContainer
                ),
                actions = {
                    // Manual refresh of the person list.
                    IconButton(onClick = { viewModel.loadPersons() }) {
                        Icon(Icons.Default.Refresh, contentDescription = "Refresh")
                    }
                }
            )
        }
    ) { paddingValues ->
        Box(
            modifier = modifier
                .fillMaxSize()
                .padding(paddingValues)
        ) {
            // Render one of: loading spinner, empty state, person list, or error view.
            when (val state = uiState) {
                is PersonInventoryViewModel.InventoryUiState.Loading -> {
                    LoadingView()
                }
                is PersonInventoryViewModel.InventoryUiState.Success -> {
                    if (state.persons.isEmpty()) {
                        EmptyView()
                    } else {
                        PersonListView(
                            persons = state.persons,
                            onDeleteClick = { personToDelete = it },
                            onScanClick = { personToScan = it },
                            onViewPhotos = { onViewPersonPhotos(it.person.id) },
                            scanningState = scanningState
                        )
                    }
                }
                is PersonInventoryViewModel.InventoryUiState.Error -> {
                    ErrorView(
                        message = state.message,
                        onRetry = { viewModel.loadPersons() }
                    )
                }
            }
            // Full-screen progress overlay while a library scan is running.
            if (scanningState is PersonInventoryViewModel.ScanningState.Scanning) {
                ScanningOverlay(scanningState as PersonInventoryViewModel.ScanningState.Scanning)
            }
        }
    }
    // Delete confirmation dialog (shown while personToDelete is non-null).
    personToDelete?.let { personWithStats ->
        AlertDialog(
            onDismissRequest = { personToDelete = null },
            title = { Text("Delete ${personWithStats.person.name}?") },
            text = {
                Text(
                    "This will delete the face model and all ${personWithStats.stats.taggedPhotoCount} " +
                    "face tags. Your photos will NOT be deleted."
                )
            },
            confirmButton = {
                TextButton(
                    onClick = {
                        viewModel.deletePerson(
                            personWithStats.person.id,
                            personWithStats.stats.faceModelId
                        )
                        personToDelete = null
                    },
                    colors = ButtonDefaults.textButtonColors(
                        contentColor = MaterialTheme.colorScheme.error
                    )
                ) {
                    Text("Delete")
                }
            },
            dismissButton = {
                TextButton(onClick = { personToDelete = null }) {
                    Text("Cancel")
                }
            }
        )
    }
    // Scan-library confirmation dialog (shown while personToScan is non-null).
    personToScan?.let { personWithStats ->
        AlertDialog(
            onDismissRequest = { personToScan = null },
            icon = { Icon(Icons.Default.Search, contentDescription = null) },
            title = { Text("Scan Library for ${personWithStats.person.name}?") },
            text = {
                Column(verticalArrangement = Arrangement.spacedBy(12.dp)) {
                    Text(
                        "This will scan your entire photo library and automatically tag " +
                        "all photos containing ${personWithStats.person.name}."
                    )
                    Text(
                        "Currently tagged: ${personWithStats.stats.taggedPhotoCount} photos",
                        style = MaterialTheme.typography.bodySmall,
                        color = MaterialTheme.colorScheme.onSurfaceVariant
                    )
                }
            },
            confirmButton = {
                Button(
                    onClick = {
                        viewModel.scanLibraryForPerson(
                            personWithStats.person.id,
                            personWithStats.stats.faceModelId
                        )
                        personToScan = null
                    }
                ) {
                    Icon(Icons.Default.Search, contentDescription = null)
                    Spacer(modifier = Modifier.width(8.dp))
                    Text("Start Scan")
                }
            },
            dismissButton = {
                TextButton(onClick = { personToScan = null }) {
                    Text("Cancel")
                }
            }
        )
    }
}
/** Full-screen centered spinner shown while trained models are loading. */
@Composable
private fun LoadingView() {
    Box(
        contentAlignment = Alignment.Center,
        modifier = Modifier.fillMaxSize()
    ) {
        Column(
            verticalArrangement = Arrangement.spacedBy(16.dp),
            horizontalAlignment = Alignment.CenterHorizontally
        ) {
            CircularProgressIndicator()
            Text(
                style = MaterialTheme.typography.bodyMedium,
                text = "Loading trained models..."
            )
        }
    }
}
/** Empty state shown when no face models have been trained yet. */
@Composable
private fun EmptyView() {
    Box(
        contentAlignment = Alignment.Center,
        modifier = Modifier.fillMaxSize()
    ) {
        Column(
            modifier = Modifier.padding(32.dp),
            verticalArrangement = Arrangement.spacedBy(16.dp),
            horizontalAlignment = Alignment.CenterHorizontally
        ) {
            // Dimmed face icon as the visual anchor of the empty state.
            Icon(
                imageVector = Icons.Default.Face,
                contentDescription = null,
                tint = MaterialTheme.colorScheme.primary.copy(alpha = 0.5f),
                modifier = Modifier.size(64.dp)
            )
            Text(
                fontWeight = FontWeight.Bold,
                style = MaterialTheme.typography.titleMedium,
                text = "No trained people yet"
            )
            Text(
                color = MaterialTheme.colorScheme.onSurfaceVariant,
                style = MaterialTheme.typography.bodyMedium,
                text = "Train a person using 10+ photos to start recognizing faces"
            )
        }
    }
}
/**
 * Error state with a message and a retry button.
 *
 * @param message error text reported by the ViewModel
 * @param onRetry invoked when the user taps Retry
 */
@Composable
private fun ErrorView(
    message: String,
    onRetry: () -> Unit
) {
    Box(
        contentAlignment = Alignment.Center,
        modifier = Modifier.fillMaxSize()
    ) {
        Column(
            modifier = Modifier.padding(32.dp),
            verticalArrangement = Arrangement.spacedBy(16.dp),
            horizontalAlignment = Alignment.CenterHorizontally
        ) {
            Icon(
                imageVector = Icons.Default.Warning,
                contentDescription = null,
                tint = MaterialTheme.colorScheme.error,
                modifier = Modifier.size(64.dp)
            )
            Text(
                fontWeight = FontWeight.Bold,
                style = MaterialTheme.typography.titleMedium,
                text = "Error"
            )
            Text(
                color = MaterialTheme.colorScheme.onSurfaceVariant,
                style = MaterialTheme.typography.bodyMedium,
                text = message
            )
            Button(onClick = onRetry) {
                Icon(Icons.Default.Refresh, contentDescription = null)
                Spacer(modifier = Modifier.width(8.dp))
                Text("Retry")
            }
        }
    }
}
/** Scrollable list: a summary header item followed by one card per trained person. */
@Composable
private fun PersonListView(
    persons: List<PersonInventoryViewModel.PersonWithStats>,
    onDeleteClick: (PersonInventoryViewModel.PersonWithStats) -> Unit,
    onScanClick: (PersonInventoryViewModel.PersonWithStats) -> Unit,
    onViewPhotos: (PersonInventoryViewModel.PersonWithStats) -> Unit,
    scanningState: PersonInventoryViewModel.ScanningState
) {
    // Id of the person currently being scanned, or null when no scan is running.
    val scanningPersonId =
        (scanningState as? PersonInventoryViewModel.ScanningState.Scanning)?.personId
    LazyColumn(
        contentPadding = PaddingValues(16.dp),
        verticalArrangement = Arrangement.spacedBy(12.dp)
    ) {
        // Aggregate summary header.
        item {
            SummaryCard(totalPersons = persons.size)
            Spacer(modifier = Modifier.height(8.dp))
        }
        // One card per trained person.
        items(persons) { entry ->
            PersonCard(
                personWithStats = entry,
                onDeleteClick = { onDeleteClick(entry) },
                onScanClick = { onScanClick(entry) },
                onViewPhotos = { onViewPhotos(entry) },
                isScanning = scanningPersonId == entry.person.id
            )
        }
    }
}
/** Header card summarizing how many trained face models exist. */
@Composable
private fun SummaryCard(totalPersons: Int) {
    // Pluralize the noun once, before building the label.
    val noun = if (totalPersons == 1) "person" else "people"
    Card(
        modifier = Modifier.fillMaxWidth(),
        colors = CardDefaults.cardColors(
            containerColor = MaterialTheme.colorScheme.primaryContainer
        )
    ) {
        Row(
            verticalAlignment = Alignment.CenterVertically,
            horizontalArrangement = Arrangement.spacedBy(16.dp),
            modifier = Modifier
                .fillMaxWidth()
                .padding(16.dp)
        ) {
            Icon(
                imageVector = Icons.Default.Face,
                contentDescription = null,
                tint = MaterialTheme.colorScheme.primary,
                modifier = Modifier.size(48.dp)
            )
            Column {
                Text(
                    fontWeight = FontWeight.Bold,
                    style = MaterialTheme.typography.titleLarge,
                    text = "$totalPersons trained $noun"
                )
                Text(
                    color = MaterialTheme.colorScheme.onPrimaryContainer.copy(alpha = 0.7f),
                    style = MaterialTheme.typography.bodyMedium,
                    text = "Face recognition models ready"
                )
            }
        }
    }
}
/**
 * Card for a single trained person: avatar + name header, stats row
 * (training photos, tagged photos, confidence), last-detected timestamp,
 * and action buttons (scan library, view tagged photos).
 *
 * @param personWithStats person plus aggregate stats to display
 * @param onDeleteClick   invoked when the trash icon is tapped
 * @param onScanClick     invoked when "Scan Library" is tapped
 * @param onViewPhotos    invoked when "View (N)" is tapped
 * @param isScanning      true while a library scan runs for THIS person;
 *                        disables the scan button and swaps in a spinner
 */
@Composable
private fun PersonCard(
    personWithStats: PersonInventoryViewModel.PersonWithStats,
    onDeleteClick: () -> Unit,
    onScanClick: () -> Unit,
    onViewPhotos: () -> Unit,
    isScanning: Boolean
) {
    val stats = personWithStats.stats
    Card(
        modifier = Modifier.fillMaxWidth(),
        elevation = CardDefaults.cardElevation(defaultElevation = 2.dp)
    ) {
        Column(
            modifier = Modifier
                .fillMaxWidth()
                .padding(16.dp)
        ) {
            // Header: avatar initial + name/id on the left, delete icon on the right.
            Row(
                modifier = Modifier.fillMaxWidth(),
                horizontalArrangement = Arrangement.SpaceBetween,
                verticalAlignment = Alignment.CenterVertically
            ) {
                Row(
                    horizontalArrangement = Arrangement.spacedBy(12.dp),
                    verticalAlignment = Alignment.CenterVertically
                ) {
                    // Circular avatar showing the person's first initial.
                    Box(
                        modifier = Modifier
                            .size(48.dp)
                            .clip(CircleShape)
                            .background(MaterialTheme.colorScheme.primary),
                        contentAlignment = Alignment.Center
                    ) {
                        Text(
                            text = personWithStats.person.name.take(1).uppercase(),
                            style = MaterialTheme.typography.titleLarge,
                            fontWeight = FontWeight.Bold,
                            color = MaterialTheme.colorScheme.onPrimary
                        )
                    }
                    Column {
                        Text(
                            text = personWithStats.person.name,
                            style = MaterialTheme.typography.titleMedium,
                            fontWeight = FontWeight.Bold,
                            maxLines = 1,
                            overflow = TextOverflow.Ellipsis
                        )
                        // Truncated id shown for debugging/identification.
                        Text(
                            text = "ID: ${personWithStats.person.id.take(8)}",
                            style = MaterialTheme.typography.bodySmall,
                            color = MaterialTheme.colorScheme.onSurfaceVariant
                        )
                    }
                }
                IconButton(onClick = onDeleteClick) {
                    Icon(
                        Icons.Default.Delete,
                        contentDescription = "Delete",
                        tint = MaterialTheme.colorScheme.error
                    )
                }
            }
            Spacer(modifier = Modifier.height(16.dp))
            // Stats row: training image count, tagged photo count, avg confidence.
            Row(
                modifier = Modifier.fillMaxWidth(),
                horizontalArrangement = Arrangement.SpaceEvenly
            ) {
                StatItem(
                    icon = Icons.Default.PhotoCamera,
                    label = "Training",
                    value = "${stats.trainingImageCount}"
                )
                StatItem(
                    icon = Icons.Default.AccountBox,
                    label = "Tagged",
                    value = "${stats.taggedPhotoCount}"
                )
                StatItem(
                    icon = Icons.Default.CheckCircle,
                    label = "Confidence",
                    value = "${(stats.averageConfidence * 100).toInt()}%",
                    // Traffic-light coloring: >=0.8 primary, >=0.6 tertiary, else error.
                    valueColor = if (stats.averageConfidence >= 0.8f) {
                        MaterialTheme.colorScheme.primary
                    } else if (stats.averageConfidence >= 0.6f) {
                        MaterialTheme.colorScheme.tertiary
                    } else {
                        MaterialTheme.colorScheme.error
                    }
                )
            }
            Spacer(modifier = Modifier.height(16.dp))
            // "Last detected" strip; omitted entirely when never detected.
            stats.lastDetectedAt?.let { timestamp ->
                Surface(
                    modifier = Modifier.fillMaxWidth(),
                    color = MaterialTheme.colorScheme.surfaceVariant,
                    shape = RoundedCornerShape(8.dp)
                ) {
                    Row(
                        modifier = Modifier.padding(12.dp),
                        horizontalArrangement = Arrangement.spacedBy(8.dp),
                        verticalAlignment = Alignment.CenterVertically
                    ) {
                        Icon(
                            Icons.Default.DateRange,
                            contentDescription = null,
                            modifier = Modifier.size(16.dp),
                            tint = MaterialTheme.colorScheme.onSurfaceVariant
                        )
                        Text(
                            text = "Last detected: ${formatDate(timestamp)}",
                            style = MaterialTheme.typography.bodySmall,
                            color = MaterialTheme.colorScheme.onSurfaceVariant
                        )
                    }
                }
            }
            Spacer(modifier = Modifier.height(12.dp))
            // Action buttons row.
            Row(
                modifier = Modifier.fillMaxWidth(),
                horizontalArrangement = Arrangement.spacedBy(8.dp)
            ) {
                // Scan Library button (primary action); shows a spinner while scanning.
                Button(
                    onClick = onScanClick,
                    modifier = Modifier.weight(1f),
                    enabled = !isScanning,
                    colors = ButtonDefaults.buttonColors(
                        containerColor = MaterialTheme.colorScheme.primary
                    )
                ) {
                    if (isScanning) {
                        CircularProgressIndicator(
                            modifier = Modifier.size(16.dp),
                            color = MaterialTheme.colorScheme.onPrimary,
                            strokeWidth = 2.dp
                        )
                    } else {
                        Icon(
                            Icons.Default.Search,
                            contentDescription = null,
                            modifier = Modifier.size(18.dp)
                        )
                    }
                    Spacer(modifier = Modifier.width(8.dp))
                    Text(if (isScanning) "Scanning..." else "Scan Library")
                }
                // View-photos button; only shown when at least one photo is tagged.
                if (stats.taggedPhotoCount > 0) {
                    OutlinedButton(
                        onClick = onViewPhotos,
                        modifier = Modifier.weight(1f)
                    ) {
                        Icon(
                            Icons.Default.Photo,
                            contentDescription = null,
                            modifier = Modifier.size(18.dp)
                        )
                        Spacer(modifier = Modifier.width(8.dp))
                        Text("View (${stats.taggedPhotoCount})")
                    }
                }
            }
        }
    }
}
/**
 * Small vertical stat cell (icon / value / label) used inside PersonCard.
 *
 * @param valueColor color of the value text; defaults to the primary color
 */
@Composable
private fun StatItem(
    icon: ImageVector,
    label: String,
    value: String,
    valueColor: Color = MaterialTheme.colorScheme.primary
) {
    Column(
        verticalArrangement = Arrangement.spacedBy(4.dp),
        horizontalAlignment = Alignment.CenterHorizontally
    ) {
        Icon(
            imageVector = icon,
            contentDescription = null,
            tint = MaterialTheme.colorScheme.onSurfaceVariant,
            modifier = Modifier.size(24.dp)
        )
        Text(
            color = valueColor,
            fontWeight = FontWeight.Bold,
            style = MaterialTheme.typography.titleMedium,
            text = value
        )
        Text(
            color = MaterialTheme.colorScheme.onSurfaceVariant,
            style = MaterialTheme.typography.bodySmall,
            text = label
        )
    }
}
/**
 * Full-screen overlay showing library-scan progress: person name,
 * progress bar, scanned counts, and faces found so far.
 *
 * @param state current scan progress snapshot from the ViewModel
 */
@Composable
private fun ScanningOverlay(state: PersonInventoryViewModel.ScanningState.Scanning) {
    Box(
        modifier = Modifier
            .fillMaxSize()
            .background(MaterialTheme.colorScheme.surface.copy(alpha = 0.95f)),
        contentAlignment = Alignment.Center
    ) {
        Card(
            modifier = Modifier
                .fillMaxWidth(0.85f)
                .padding(24.dp),
            elevation = CardDefaults.cardElevation(defaultElevation = 8.dp)
        ) {
            Column(
                modifier = Modifier.padding(24.dp),
                horizontalAlignment = Alignment.CenterHorizontally,
                verticalArrangement = Arrangement.spacedBy(16.dp)
            ) {
                Icon(
                    Icons.Default.Search,
                    contentDescription = null,
                    modifier = Modifier.size(48.dp),
                    tint = MaterialTheme.colorScheme.primary
                )
                Text(
                    text = "Scanning Library",
                    style = MaterialTheme.typography.titleLarge,
                    fontWeight = FontWeight.Bold
                )
                Text(
                    text = "Finding ${state.personName} in your photos...",
                    style = MaterialTheme.typography.bodyMedium,
                    color = MaterialTheme.colorScheme.onSurfaceVariant
                )
                LinearProgressIndicator(
                    // BUG FIX: guard total == 0 (scan not yet sized); the raw
                    // division previously produced a NaN progress fraction.
                    progress = {
                        val total = state.total
                        if (total > 0) state.progress / total.toFloat() else 0f
                    },
                    modifier = Modifier.fillMaxWidth(),
                )
                Text(
                    text = "${state.progress} / ${state.total} photos scanned",
                    style = MaterialTheme.typography.bodySmall
                )
                Text(
                    text = "${state.facesFound} faces detected",
                    style = MaterialTheme.typography.labelMedium,
                    color = MaterialTheme.colorScheme.primary
                )
            }
        }
    }
}
/**
 * Format an epoch-millis timestamp as e.g. "Mar 1, 2024 9:30 AM"
 * using the device's default locale and time zone.
 */
private fun formatDate(timestamp: Long): String =
    SimpleDateFormat("MMM d, yyyy h:mm a", Locale.getDefault()).format(Date(timestamp))

View File

@@ -0,0 +1,299 @@
package com.placeholder.sherpai2.ui.modelinventory
import android.app.Application
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.net.Uri
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.face.FaceDetection
import com.google.mlkit.vision.face.FaceDetectorOptions
import com.placeholder.sherpai2.data.local.entity.PersonEntity
import com.placeholder.sherpai2.data.repository.DetectedFace
import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository
import com.placeholder.sherpai2.data.repository.PersonFaceStats
import com.placeholder.sherpai2.domain.repository.ImageRepository
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
import kotlinx.coroutines.tasks.await
import javax.inject.Inject
/**
 * PersonInventoryViewModel - Manage trained face models
 *
 * Responsibilities:
 * - List all trained persons with their stats (sorted by tagged-photo count)
 * - Delete face models
 * - Scan the entire photo library for one person, tagging matches
 * - Expose sample images per person
 */
@HiltViewModel
class PersonInventoryViewModel @Inject constructor(
    application: Application,
    private val faceRecognitionRepository: FaceRecognitionRepository,
    private val imageRepository: ImageRepository
) : AndroidViewModel(application) {
    // Inventory list state for the screen.
    private val _uiState = MutableStateFlow<InventoryUiState>(InventoryUiState.Loading)
    val uiState: StateFlow<InventoryUiState> = _uiState.asStateFlow()
    // Library-scan progress state, kept separate so the list stays visible while scanning.
    private val _scanningState = MutableStateFlow<ScanningState>(ScanningState.Idle)
    val scanningState: StateFlow<ScanningState> = _scanningState.asStateFlow()
    // ML Kit face detector - built lazily on first scan, released in onCleared().
    private val faceDetector by lazy {
        val options = FaceDetectorOptions.Builder()
            .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
            .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE)
            .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE)
            .setMinFaceSize(0.15f) // ignore faces smaller than 15% of the image width
            .build()
        FaceDetection.getClient(options)
    }
    /** A trained person paired with their aggregate face-model stats. */
    data class PersonWithStats(
        val person: PersonEntity,
        val stats: PersonFaceStats
    )
    /** UI state for the inventory list. */
    sealed class InventoryUiState {
        object Loading : InventoryUiState()
        data class Success(val persons: List<PersonWithStats>) : InventoryUiState()
        data class Error(val message: String) : InventoryUiState()
    }
    /** Progress of a whole-library scan for a single person. */
    sealed class ScanningState {
        object Idle : ScanningState()
        data class Scanning(
            val personId: String,
            val personName: String,
            val progress: Int,     // images processed so far
            val total: Int,        // total images in the library
            val facesFound: Int    // matches for this person so far
        ) : ScanningState()
        data class Complete(
            val personName: String,
            val facesFound: Int,
            val imagesScanned: Int
        ) : ScanningState()
    }
    init {
        loadPersons()
    }
    /**
     * Load all trained persons with their stats.
     *
     * Persons without stats are dropped; the result is sorted by
     * tagged-photo count (most-tagged first).
     */
    fun loadPersons() {
        viewModelScope.launch {
            try {
                _uiState.value = InventoryUiState.Loading
                val persons = faceRecognitionRepository.getPersonsWithFaceModels()
                val personsWithStats = persons.mapNotNull { person ->
                    val stats = faceRecognitionRepository.getPersonFaceStats(person.id)
                    if (stats != null) {
                        PersonWithStats(person, stats)
                    } else {
                        null
                    }
                }.sortedByDescending { it.stats.taggedPhotoCount }
                _uiState.value = InventoryUiState.Success(personsWithStats)
            } catch (e: Exception) {
                _uiState.value = InventoryUiState.Error(
                    e.message ?: "Failed to load persons"
                )
            }
        }
    }
    /**
     * Delete a face model, then refresh the list.
     *
     * NOTE(review): [personId] is currently unused (deletion is keyed by
     * [faceModelId] alone) but kept for call-site stability.
     */
    fun deletePerson(personId: String, faceModelId: String) {
        viewModelScope.launch {
            try {
                faceRecognitionRepository.deleteFaceModel(faceModelId)
                loadPersons() // Refresh list
            } catch (e: Exception) {
                _uiState.value = InventoryUiState.Error(
                    "Failed to delete: ${e.message}"
                )
            }
        }
    }
    /**
     * Scan entire photo library for a specific person.
     *
     * Process:
     * 1. Get all images from the library
     * 2. For each image:
     *    - Detect faces using ML Kit
     *    - Let the repository embed/compare them against the person's model
     *    - Count tags created for [faceModelId]
     * 3. Publish progress after every image; publish Complete at the end,
     *    then reset to Idle after a short delay.
     */
    fun scanLibraryForPerson(personId: String, faceModelId: String) {
        viewModelScope.launch {
            try {
                // Resolve the person's display name from the current list state.
                val currentState = _uiState.value
                val person = if (currentState is InventoryUiState.Success) {
                    currentState.persons.find { it.person.id == personId }?.person
                } else null
                val personName = person?.name ?: "Unknown"
                // Get all images from library (first emission of the flow).
                val allImages = imageRepository.getAllImages().first()
                val totalImages = allImages.size
                _scanningState.value = ScanningState.Scanning(
                    personId = personId,
                    personName = personName,
                    progress = 0,
                    total = totalImages,
                    facesFound = 0
                )
                var facesFound = 0
                // Scan each image sequentially so progress is meaningful.
                allImages.forEachIndexed { index, imageWithEverything ->
                    val image = imageWithEverything.image
                    // Detect faces in this image
                    val detectedFaces = detectFacesInImage(image.imageUri)
                    if (detectedFaces.isNotEmpty()) {
                        // Scan this image for the person
                        val tags = faceRecognitionRepository.scanImage(
                            imageId = image.imageId,
                            detectedFaces = detectedFaces,
                            threshold = 0.6f // Slightly lower threshold for library scanning
                        )
                        // Count only tags that matched our target face model.
                        val matchingTags = tags.filter { tag ->
                            tag.faceModelId == faceModelId
                        }
                        facesFound += matchingTags.size
                    }
                    // Update progress
                    _scanningState.value = ScanningState.Scanning(
                        personId = personId,
                        personName = personName,
                        progress = index + 1,
                        total = totalImages,
                        facesFound = facesFound
                    )
                }
                // Scan complete
                _scanningState.value = ScanningState.Complete(
                    personName = personName,
                    facesFound = facesFound,
                    imagesScanned = totalImages
                )
                // Refresh the list to show updated counts
                loadPersons()
                // Reset scanning state after 3 seconds
                delay(3000)
                _scanningState.value = ScanningState.Idle
            } catch (e: Exception) {
                _scanningState.value = ScanningState.Idle
                _uiState.value = InventoryUiState.Error(
                    "Scan failed: ${e.message}"
                )
            }
        }
    }
    /**
     * Detect faces in an image using ML Kit.
     *
     * @param imageUri URI string of the image to scan
     * @return detected faces with cropped bitmaps; empty list on any failure
     */
    private suspend fun detectFacesInImage(imageUri: String): List<DetectedFace> = withContext(Dispatchers.Default) {
        try {
            // Load bitmap from URI. Fix: use{} guarantees the stream is closed
            // even if decodeStream throws — the previous manual close() leaked
            // the InputStream on decode errors.
            val uri = Uri.parse(imageUri)
            val bitmap = getApplication<Application>().contentResolver
                .openInputStream(uri)
                ?.use { stream -> BitmapFactory.decodeStream(stream) }
                ?: return@withContext emptyList()
            // NOTE(review): rotationDegrees is hard-coded to 0, so EXIF
            // orientation is ignored — confirm library photos are upright.
            val image = InputImage.fromBitmap(bitmap, 0)
            // Detect faces (await the ML Kit Task)
            val faces = faceDetector.process(image).await()
            // Convert to DetectedFace objects, cropping each face region.
            faces.mapNotNull { face ->
                val boundingBox = face.boundingBox
                // Clamp the crop to the bitmap; skip degenerate boxes.
                val croppedFace = try {
                    val left = boundingBox.left.coerceAtLeast(0)
                    val top = boundingBox.top.coerceAtLeast(0)
                    val width = boundingBox.width().coerceAtMost(bitmap.width - left)
                    val height = boundingBox.height().coerceAtMost(bitmap.height - top)
                    if (width > 0 && height > 0) {
                        Bitmap.createBitmap(bitmap, left, top, width, height)
                    } else {
                        null
                    }
                } catch (e: Exception) {
                    null
                }
                if (croppedFace != null) {
                    DetectedFace(
                        croppedBitmap = croppedFace,
                        boundingBox = boundingBox
                    )
                } else {
                    null
                }
            }
        } catch (e: Exception) {
            // Best-effort: bad URI, decode failure or detector error all
            // degrade to "no faces in this image".
            emptyList()
        }
    }
    /**
     * Get sample images for a person (delegates to the repository).
     */
    suspend fun getPersonImages(personId: String) =
        faceRecognitionRepository.getImagesForPerson(personId)
    override fun onCleared() {
        super.onCleared()
        // Release the native ML Kit detector.
        faceDetector.close()
    }
}

View File

@@ -0,0 +1,157 @@
package com.placeholder.sherpai2.ui.navigation
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.automirrored.filled.Label
import androidx.compose.material.icons.filled.*
import androidx.compose.ui.graphics.vector.ImageVector
/**
* AppDestinations - Navigation metadata for drawer UI
*
* Clean, organized structure:
* - Routes for navigation
* - Icons for visual identity
* - Labels for display
* - Descriptions for clarity
* - Grouped by function
*/
sealed class AppDestinations(
    val route: String,            // navigation route (mirrors AppRoutes constants)
    val icon: ImageVector,        // drawer icon
    val label: String,            // display name shown in the drawer
    val description: String = ""  // optional subtitle under the label
) {
    // ==================
    // PHOTO BROWSING
    // ==================
    data object Search : AppDestinations(
        route = AppRoutes.SEARCH,
        icon = Icons.Default.Search,
        label = "Search",
        description = "Find photos by tag or person"
    )
    data object Tour : AppDestinations(
        route = AppRoutes.TOUR,
        icon = Icons.Default.Place,
        label = "Tour",
        description = "Browse by location & time"
    )
    // ImageDetail is not in drawer (internal navigation only)
    // ==================
    // FACE RECOGNITION
    // ==================
    data object Inventory : AppDestinations(
        route = AppRoutes.INVENTORY,
        icon = Icons.Default.Face,
        label = "People",
        description = "Trained face models"
    )
    data object Train : AppDestinations(
        route = AppRoutes.TRAIN,
        icon = Icons.Default.ModelTraining,
        label = "Train",
        description = "Train new person"
    )
    data object Models : AppDestinations(
        route = AppRoutes.MODELS,
        icon = Icons.Default.SmartToy,
        label = "Models",
        description = "AI model management"
    )
    // ==================
    // ORGANIZATION
    // ==================
    data object Tags : AppDestinations(
        route = AppRoutes.TAGS,
        icon = Icons.AutoMirrored.Filled.Label,
        label = "Tags",
        description = "Manage photo tags"
    )
    data object Upload : AppDestinations(
        route = AppRoutes.UPLOAD,
        icon = Icons.Default.UploadFile,
        label = "Upload",
        description = "Add new photos"
    )
    // ==================
    // SETTINGS
    // ==================
    data object Settings : AppDestinations(
        route = AppRoutes.SETTINGS,
        icon = Icons.Default.Settings,
        label = "Settings",
        description = "App preferences"
    )
}
/**
 * Organized destination groups for beautiful drawer sections.
 * Each list below is a drawer section, in display order.
 */
// Photo browsing section
val photoDestinations = listOf(
    AppDestinations.Search,
    AppDestinations.Tour
)
// Face recognition section
val faceRecognitionDestinations = listOf(
    AppDestinations.Inventory,
    AppDestinations.Train,
    AppDestinations.Models
)
// Organization section
val organizationDestinations = listOf(
    AppDestinations.Tags,
    AppDestinations.Upload
)
// Settings (separate, pinned to bottom)
val settingsDestination = AppDestinations.Settings
/**
 * All drawer items (excludes Settings which is handled separately)
 */
val allMainDrawerDestinations = photoDestinations + faceRecognitionDestinations + organizationDestinations
/**
 * Resolve a navigation route string to its drawer destination.
 *
 * Returns null for unknown routes — and for a null route, e.g. before the
 * NavController has a current entry — so callers can fall back gracefully.
 * Useful for highlighting the current route in the drawer.
 */
fun getDestinationByRoute(route: String?): AppDestinations? =
    (allMainDrawerDestinations + settingsDestination)
        .firstOrNull { it.route == route }
/**
 * Legacy support (for backwards compatibility).
 * These aliases match the old flat-list structure; prefer the grouped
 * lists above for new code.
 */
@Deprecated("Use organized groups instead", ReplaceWith("allMainDrawerDestinations"))
val mainDrawerItems = allMainDrawerDestinations
@Deprecated("Use settingsDestination instead", ReplaceWith("listOf(settingsDestination)"))
val utilityDrawerItems = listOf(settingsDestination)

View File

@@ -0,0 +1,253 @@
package com.placeholder.sherpai2.ui.navigation
import android.net.Uri
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.collectAsState
import androidx.compose.runtime.getValue
import androidx.compose.ui.Modifier
import androidx.hilt.navigation.compose.hiltViewModel
import androidx.navigation.NavHostController
import androidx.navigation.NavType
import androidx.navigation.compose.NavHost
import androidx.navigation.compose.composable
import androidx.navigation.navArgument
import com.placeholder.sherpai2.ui.devscreens.DummyScreen
import com.placeholder.sherpai2.ui.imagedetail.ImageDetailScreen
import com.placeholder.sherpai2.ui.modelinventory.PersonInventoryScreen
import com.placeholder.sherpai2.ui.search.SearchScreen
import com.placeholder.sherpai2.ui.search.SearchViewModel
import com.placeholder.sherpai2.ui.tour.TourScreen
import com.placeholder.sherpai2.ui.tour.TourViewModel
import com.placeholder.sherpai2.ui.trainingprep.ImageSelectorScreen
import com.placeholder.sherpai2.ui.trainingprep.ScanResultsScreen
import com.placeholder.sherpai2.ui.trainingprep.ScanningState
import com.placeholder.sherpai2.ui.trainingprep.TrainViewModel
import com.placeholder.sherpai2.ui.trainingprep.TrainingScreen
import java.net.URLDecoder
import java.net.URLEncoder
/**
* AppNavHost - Main navigation graph
*
* Complete flow:
* - Photo browsing (Search, Tour, Detail)
* - Face recognition (Inventory, Train)
* - Organization (Tags, Upload)
* - Settings
*
* Features:
* - URL encoding for safe navigation
* - Proper back stack management
* - State preservation
* - Beautiful placeholders
*/
@Composable
fun AppNavHost(
    navController: NavHostController,
    modifier: Modifier = Modifier
) {
    NavHost(
        navController = navController,
        startDestination = AppRoutes.SEARCH,
        modifier = modifier
    ) {
        // ==========================================
        // PHOTO BROWSING
        // ==========================================
        /**
         * SEARCH SCREEN
         * Main photo browser with face tag search
         */
        composable(AppRoutes.SEARCH) {
            val searchViewModel: SearchViewModel = hiltViewModel()
            SearchScreen(
                searchViewModel = searchViewModel,
                onImageClick = { imageUri ->
                    // URL-encode so arbitrary URI characters survive as a route arg.
                    // NOTE(review): SearchScreen's card passes image.imageId here,
                    // while this callback (and the detail route) names it imageUri —
                    // confirm which identifier ImageDetailScreen expects.
                    val encodedUri = URLEncoder.encode(imageUri, "UTF-8")
                    navController.navigate("${AppRoutes.IMAGE_DETAIL}/$encodedUri")
                }
            )
        }
        /**
         * IMAGE DETAIL SCREEN
         * Single photo view with metadata
         */
        composable(
            route = "${AppRoutes.IMAGE_DETAIL}/{imageUri}",
            arguments = listOf(
                navArgument("imageUri") {
                    type = NavType.StringType
                }
            )
        ) { backStackEntry ->
            // Decode the argument; a missing arg is a programming error.
            val imageUri = backStackEntry.arguments?.getString("imageUri")
                ?.let { URLDecoder.decode(it, "UTF-8") }
                ?: error("imageUri missing from navigation")
            ImageDetailScreen(
                imageUri = imageUri,
                onBack = { navController.popBackStack() }
            )
        }
        /**
         * TOUR SCREEN
         * Browse photos by location and time
         */
        composable(AppRoutes.TOUR) {
            val tourViewModel: TourViewModel = hiltViewModel()
            TourScreen(
                tourViewModel = tourViewModel,
                onImageClick = { imageUri ->
                    val encodedUri = URLEncoder.encode(imageUri, "UTF-8")
                    navController.navigate("${AppRoutes.IMAGE_DETAIL}/$encodedUri")
                }
            )
        }
        // ==========================================
        // FACE RECOGNITION SYSTEM
        // ==========================================
        /**
         * PERSON INVENTORY SCREEN
         * View all trained face models
         *
         * Features:
         * - List all trained people
         * - Show stats (training count, tagged photos, confidence)
         * - Delete models
         * - View photos containing each person
         */
        composable(AppRoutes.INVENTORY) {
            PersonInventoryScreen(
                onViewPersonPhotos = { personId ->
                    // Navigate back to search
                    // TODO: In future, add person filter to search screen
                    navController.navigate(AppRoutes.SEARCH)
                }
            )
        }
        /**
         * TRAINING FLOW
         * Train new face recognition model
         *
         * Flow:
         * 1. TrainingScreen (select images button)
         * 2. ImageSelectorScreen (pick 10+ photos)
         * 3. ScanResultsScreen (validation + name input)
         * 4. Training completes → navigate to Inventory
         *
         * Image URIs come back from ImageSelector via this entry's
         * SavedStateHandle under the "selected_image_uris" key.
         */
        composable(AppRoutes.TRAIN) { entry ->
            val trainViewModel: TrainViewModel = hiltViewModel()
            val uiState by trainViewModel.uiState.collectAsState()
            // Get images selected from ImageSelector
            val selectedUris = entry.savedStateHandle.get<List<Uri>>("selected_image_uris")
            // Start scanning when new images are selected; the key is removed
            // afterwards so the same selection is not re-processed on recomposition.
            LaunchedEffect(selectedUris) {
                if (selectedUris != null && uiState is ScanningState.Idle) {
                    trainViewModel.scanAndTagFaces(selectedUris)
                    entry.savedStateHandle.remove<List<Uri>>("selected_image_uris")
                }
            }
            when (uiState) {
                is ScanningState.Idle -> {
                    // Show start screen with "Select Images" button
                    TrainingScreen(
                        onSelectImages = {
                            navController.navigate(AppRoutes.IMAGE_SELECTOR)
                        }
                    )
                }
                else -> {
                    // Show validation results and training UI
                    ScanResultsScreen(
                        state = uiState,
                        onFinish = {
                            // After training, go to inventory to see new person
                            navController.navigate(AppRoutes.INVENTORY) {
                                popUpTo(AppRoutes.TRAIN) { inclusive = true }
                            }
                        }
                    )
                }
            }
        }
        /**
         * IMAGE SELECTOR SCREEN
         * Pick images for training (internal screen)
         */
        composable(AppRoutes.IMAGE_SELECTOR) {
            ImageSelectorScreen(
                onImagesSelected = { uris ->
                    // Pass selected URIs back to Train screen via its
                    // SavedStateHandle (previous entry = TRAIN).
                    navController.previousBackStackEntry
                        ?.savedStateHandle
                        ?.set("selected_image_uris", uris)
                    navController.popBackStack()
                }
            )
        }
        /**
         * MODELS SCREEN
         * AI model management (placeholder)
         */
        composable(AppRoutes.MODELS) {
            DummyScreen(
                title = "AI Models",
                subtitle = "Manage face recognition models"
            )
        }
        // ==========================================
        // ORGANIZATION
        // ==========================================
        /**
         * TAGS SCREEN
         * Manage photo tags (placeholder)
         */
        composable(AppRoutes.TAGS) {
            DummyScreen(
                title = "Tags",
                subtitle = "Organize your photos with tags"
            )
        }
        /**
         * UPLOAD SCREEN
         * Import new photos (placeholder)
         */
        composable(AppRoutes.UPLOAD) {
            DummyScreen(
                title = "Upload",
                subtitle = "Add photos to your library"
            )
        }
        // ==========================================
        // SETTINGS
        // ==========================================
        /**
         * SETTINGS SCREEN
         * App preferences (placeholder)
         */
        composable(AppRoutes.SETTINGS) {
            DummyScreen(
                title = "Settings",
                subtitle = "App preferences and configuration"
            )
        }
    }
}

View File

@@ -0,0 +1,32 @@
package com.placeholder.sherpai2.ui.navigation
/**
* Centralized list of navigation routes used by NavHost.
*
* This intentionally mirrors AppDestinations.route
* but exists as a pure navigation concern.
*
* Why:
* - Drawer UI ≠ Navigation system
* - Keeps NavHost decoupled from icons / labels
*/
object AppRoutes {
    const val TOUR = "tour"
    const val SEARCH = "search"
    const val MODELS = "models"
    const val INVENTORY = "inv"
    const val TRAIN = "train"
    const val TAGS = "tags"
    const val UPLOAD = "upload"
    const val SETTINGS = "settings"
    // NOTE(review): the values below break the lowercase-single-word
    // convention used above; route strings containing spaces are legal in
    // Navigation Compose but unconventional. Renaming the values is safe
    // app-internally (all call sites use these constants) but would break
    // any deep links — confirm before normalizing.
    const val IMAGE_DETAIL = "IMAGE_DETAIL"
    const val CROP_SCREEN = "CROP_SCREEN"
    const val IMAGE_SELECTOR = "Image Selection"
    const val TRAINING_SCREEN = "TRAINING_SCREEN"
    // NOTE(review): name is not SCREAMING_SNAKE_CASE like its siblings.
    const val ScanResultsScreen = "First Scan Results"
    //const val IMAGE_DETAIL = "IMAGE_DETAIL"
}

View File

@@ -0,0 +1,243 @@
package com.placeholder.sherpai2.ui.presentation
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material3.*
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Brush
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.dp
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.automirrored.filled.Label
import androidx.compose.material.icons.automirrored.filled.List
import androidx.compose.material.icons.filled.*
import com.placeholder.sherpai2.ui.navigation.AppRoutes
/**
* Beautiful app drawer with sections, gradient header, and polish
*/
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun AppDrawerContent(
    currentRoute: String?,
    onDestinationClicked: (String) -> Unit
) {
    ModalDrawerSheet(
        modifier = Modifier.width(300.dp),
        drawerContainerColor = MaterialTheme.colorScheme.surface
    ) {
        Column(modifier = Modifier.fillMaxSize()) {
            // ===== BEAUTIFUL GRADIENT HEADER =====
            Box(
                modifier = Modifier
                    .fillMaxWidth()
                    .background(
                        Brush.verticalGradient(
                            colors = listOf(
                                MaterialTheme.colorScheme.primaryContainer,
                                MaterialTheme.colorScheme.surface
                            )
                        )
                    )
                    .padding(24.dp)
            ) {
                Column(
                    verticalArrangement = Arrangement.spacedBy(8.dp)
                ) {
                    // App icon/logo area
                    Surface(
                        modifier = Modifier.size(56.dp),
                        shape = RoundedCornerShape(16.dp),
                        color = MaterialTheme.colorScheme.primary,
                        shadowElevation = 4.dp
                    ) {
                        Box(contentAlignment = Alignment.Center) {
                            Icon(
                                Icons.Default.Face,
                                contentDescription = null,
                                modifier = Modifier.size(32.dp),
                                tint = MaterialTheme.colorScheme.onPrimary
                            )
                        }
                    }
                    Text(
                        "SherpAI",
                        style = MaterialTheme.typography.headlineMedium,
                        fontWeight = FontWeight.Bold,
                        color = MaterialTheme.colorScheme.onSurface
                    )
                    Text(
                        "Face Recognition System",
                        style = MaterialTheme.typography.bodyMedium,
                        color = MaterialTheme.colorScheme.onSurfaceVariant
                    )
                }
            }
            Spacer(modifier = Modifier.height(8.dp))
            // ===== NAVIGATION SECTIONS =====
            // NOTE(review): the items below duplicate the route/label/icon
            // metadata declared in AppDestinations — consider driving the
            // drawer from those shared groups to avoid drift.
            Column(
                modifier = Modifier
                    .fillMaxWidth()
                    .weight(1f)
                    .padding(horizontal = 12.dp),
                verticalArrangement = Arrangement.spacedBy(4.dp)
            ) {
                // Photos Section
                DrawerSection(title = "Photos")
                val photoItems = listOf(
                    DrawerItem(AppRoutes.SEARCH, "Search", Icons.Default.Search, "Find photos by tag or person"),
                    DrawerItem(AppRoutes.TOUR, "Tour", Icons.Default.Place, "Browse by location & time")
                )
                photoItems.forEach { item ->
                    DrawerNavigationItem(
                        item = item,
                        selected = item.route == currentRoute,
                        onClick = { onDestinationClicked(item.route) }
                    )
                }
                Spacer(modifier = Modifier.height(8.dp))
                // Face Recognition Section
                DrawerSection(title = "Face Recognition")
                val faceItems = listOf(
                    DrawerItem(AppRoutes.INVENTORY, "People", Icons.Default.Face, "Trained face models"),
                    DrawerItem(AppRoutes.TRAIN, "Train", Icons.Default.ModelTraining, "Train new person"),
                    DrawerItem(AppRoutes.MODELS, "Models", Icons.Default.SmartToy, "AI model management")
                )
                faceItems.forEach { item ->
                    DrawerNavigationItem(
                        item = item,
                        selected = item.route == currentRoute,
                        onClick = { onDestinationClicked(item.route) }
                    )
                }
                Spacer(modifier = Modifier.height(8.dp))
                // Organization Section
                DrawerSection(title = "Organization")
                val orgItems = listOf(
                    DrawerItem(AppRoutes.TAGS, "Tags", Icons.AutoMirrored.Filled.Label, "Manage photo tags"),
                    DrawerItem(AppRoutes.UPLOAD, "Upload", Icons.Default.UploadFile, "Add new photos")
                )
                orgItems.forEach { item ->
                    DrawerNavigationItem(
                        item = item,
                        selected = item.route == currentRoute,
                        onClick = { onDestinationClicked(item.route) }
                    )
                }
                Spacer(modifier = Modifier.weight(1f))
                // Settings at bottom
                HorizontalDivider(
                    modifier = Modifier.padding(vertical = 8.dp),
                    color = MaterialTheme.colorScheme.outlineVariant
                )
                DrawerNavigationItem(
                    item = DrawerItem(
                        AppRoutes.SETTINGS,
                        "Settings",
                        Icons.Default.Settings,
                        "App preferences"
                    ),
                    selected = AppRoutes.SETTINGS == currentRoute,
                    onClick = { onDestinationClicked(AppRoutes.SETTINGS) }
                )
                Spacer(modifier = Modifier.height(8.dp))
            }
        }
    }
}
/**
 * Small bold section header separating groups of drawer items.
 *
 * @param title the section caption, rendered in the primary color
 */
@Composable
private fun DrawerSection(title: String) {
    val headerPadding = Modifier.padding(horizontal = 16.dp, vertical = 8.dp)
    Text(
        title,
        modifier = headerPadding,
        color = MaterialTheme.colorScheme.primary,
        fontWeight = FontWeight.Bold,
        style = MaterialTheme.typography.labelMedium
    )
}
/**
 * Individual navigation item with icon, label, and optional subtitle.
 *
 * @param item route/label/icon/subtitle bundle to render
 * @param selected whether this item matches the current route (highlights it)
 * @param onClick invoked with no args; the caller binds the route
 */
@Composable
private fun DrawerNavigationItem(
    item: DrawerItem,
    selected: Boolean,
    onClick: () -> Unit
) {
    NavigationDrawerItem(
        label = {
            Column(verticalArrangement = Arrangement.spacedBy(2.dp)) {
                Text(
                    text = item.label,
                    style = MaterialTheme.typography.bodyLarge,
                    fontWeight = if (selected) FontWeight.SemiBold else FontWeight.Normal
                )
                // Subtitle is optional; omitted entirely when null.
                item.subtitle?.let {
                    Text(
                        text = it,
                        style = MaterialTheme.typography.bodySmall,
                        color = MaterialTheme.colorScheme.onSurfaceVariant.copy(alpha = 0.7f)
                    )
                }
            }
        },
        icon = {
            Icon(
                item.icon,
                contentDescription = item.label,
                modifier = Modifier.size(24.dp)
            )
        },
        selected = selected,
        onClick = onClick,
        modifier = Modifier
            .padding(NavigationDrawerItemDefaults.ItemPadding)
            .clip(RoundedCornerShape(12.dp)),
        colors = NavigationDrawerItemDefaults.colors(
            selectedContainerColor = MaterialTheme.colorScheme.primaryContainer,
            selectedIconColor = MaterialTheme.colorScheme.primary,
            selectedTextColor = MaterialTheme.colorScheme.onPrimaryContainer,
            unselectedContainerColor = Color.Transparent
        )
    )
}
/**
 * Data class for drawer items: one navigable entry's display metadata.
 */
private data class DrawerItem(
    val route: String,                                          // navigation route (AppRoutes constant)
    val label: String,                                          // primary text
    val icon: androidx.compose.ui.graphics.vector.ImageVector,  // leading icon
    val subtitle: String? = null                                // optional secondary line
)

View File

@@ -0,0 +1,68 @@
package com.placeholder.sherpai2.ui.presentation
import androidx.compose.foundation.layout.padding
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.Menu
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Modifier
import androidx.navigation.NavController
import androidx.navigation.compose.currentBackStackEntryAsState
import androidx.navigation.compose.rememberNavController
import com.placeholder.sherpai2.ui.navigation.AppNavHost
import com.placeholder.sherpai2.ui.navigation.AppRoutes
import kotlinx.coroutines.launch
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun MainScreen() {
    val drawerState = rememberDrawerState(initialValue = DrawerValue.Closed)
    val scope = rememberCoroutineScope()
    // Navigation controller for NavHost
    val navController = rememberNavController()
    // Track current backstack entry to update top bar title dynamically
    val navBackStackEntry by navController.currentBackStackEntryAsState()
    val currentRoute = navBackStackEntry?.destination?.route ?: AppRoutes.SEARCH
    // Drawer content for navigation
    ModalNavigationDrawer(
        drawerState = drawerState,
        drawerContent = {
            AppDrawerContent(
                currentRoute = currentRoute,
                onDestinationClicked = { route ->
                    scope.launch {
                        drawerState.close()
                        // Skip navigation when the destination is already current.
                        if (route != currentRoute) {
                            navController.navigate(route) {
                                // Avoid multiple copies of the same destination
                                launchSingleTop = true
                            }
                        }
                    }
                }
            )
        },
    ) {
        // Main scaffold with top bar
        Scaffold(
            topBar = {
                TopAppBar(
                    // NOTE(review): shows the raw route string capitalized
                    // (e.g. "Inv" for the inventory route) — consider mapping
                    // routes to display labels instead.
                    title = { Text(currentRoute.replaceFirstChar { it.uppercase() }) },
                    navigationIcon = {
                        IconButton(onClick = { scope.launch { drawerState.open() } }) {
                            Icon(Icons.Filled.Menu, contentDescription = "Open Drawer")
                        }
                    }
                )
            }
        ) { paddingValues ->
            AppNavHost(
                navController = navController,
                modifier = Modifier.padding(paddingValues)
            )
        }
    }
}

View File

@@ -0,0 +1,405 @@
package com.placeholder.sherpai2.ui.search
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.grid.*
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Brush
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp
import androidx.lifecycle.compose.collectAsStateWithLifecycle
import com.placeholder.sherpai2.ui.search.components.ImageGridItem
/**
* Beautiful SearchScreen with face tag display
*
* Polish improvements:
* - Gradient header
* - Better stats card
* - Smooth animations
* - Enhanced visual hierarchy
*/
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun SearchScreen(
    modifier: Modifier = Modifier,
    searchViewModel: SearchViewModel,
    onImageClick: (String) -> Unit
) {
    // Search text is plain UI state; results re-query reactively as it changes.
    var query by remember { mutableStateOf("") }
    val images by searchViewModel
        .searchImagesByTag(query)
        .collectAsStateWithLifecycle(initialValue = emptyList())
    Scaffold(
        topBar = {
            // Gradient header
            Box(
                modifier = Modifier
                    .fillMaxWidth()
                    .background(
                        Brush.verticalGradient(
                            colors = listOf(
                                MaterialTheme.colorScheme.primaryContainer,
                                MaterialTheme.colorScheme.surface
                            )
                        )
                    )
            ) {
                Column(
                    modifier = Modifier
                        .fillMaxWidth()
                        .padding(16.dp)
                ) {
                    // Title
                    Row(
                        verticalAlignment = Alignment.CenterVertically,
                        horizontalArrangement = Arrangement.spacedBy(12.dp)
                    ) {
                        Surface(
                            shape = RoundedCornerShape(12.dp),
                            color = MaterialTheme.colorScheme.primary,
                            shadowElevation = 2.dp,
                            modifier = Modifier.size(48.dp)
                        ) {
                            Box(contentAlignment = Alignment.Center) {
                                Icon(
                                    Icons.Default.Search,
                                    contentDescription = null,
                                    tint = MaterialTheme.colorScheme.onPrimary,
                                    modifier = Modifier.size(28.dp)
                                )
                            }
                        }
                        Column {
                            Text(
                                text = "Search Photos",
                                style = MaterialTheme.typography.headlineMedium,
                                fontWeight = FontWeight.Bold
                            )
                            Text(
                                text = "Find by tag or person",
                                style = MaterialTheme.typography.bodyMedium,
                                color = MaterialTheme.colorScheme.onSurfaceVariant
                            )
                        }
                    }
                    Spacer(modifier = Modifier.height(16.dp))
                    // Search bar with a clear button when non-empty
                    OutlinedTextField(
                        value = query,
                        onValueChange = { query = it },
                        label = { Text("Search by tag") },
                        leadingIcon = {
                            Icon(Icons.Default.Search, contentDescription = null)
                        },
                        trailingIcon = {
                            if (query.isNotEmpty()) {
                                IconButton(onClick = { query = "" }) {
                                    Icon(Icons.Default.Clear, contentDescription = "Clear")
                                }
                            }
                        },
                        modifier = Modifier.fillMaxWidth(),
                        singleLine = true,
                        shape = RoundedCornerShape(16.dp),
                        colors = OutlinedTextFieldDefaults.colors(
                            focusedContainerColor = MaterialTheme.colorScheme.surface,
                            unfocusedContainerColor = MaterialTheme.colorScheme.surface
                        )
                    )
                }
            }
        }
    ) { paddingValues ->
        Column(
            modifier = modifier
                .fillMaxSize()
                .padding(paddingValues)
        ) {
            // Stats bar
            if (images.isNotEmpty()) {
                StatsBar(images = images)
            }
            // Results grid: three states — idle (no query), no matches, results.
            if (images.isEmpty() && query.isBlank()) {
                EmptySearchState()
            } else if (images.isEmpty() && query.isNotBlank()) {
                NoResultsState(query = query)
            } else {
                LazyVerticalGrid(
                    columns = GridCells.Adaptive(120.dp),
                    contentPadding = PaddingValues(12.dp),
                    verticalArrangement = Arrangement.spacedBy(12.dp),
                    horizontalArrangement = Arrangement.spacedBy(12.dp),
                    modifier = Modifier.fillMaxSize()
                ) {
                    items(
                        items = images,
                        key = { it.image.imageId }
                    ) { imageWithFaceTags ->
                        ImageWithFaceTagsCard(
                            imageWithFaceTags = imageWithFaceTags,
                            onImageClick = onImageClick
                        )
                    }
                }
            }
        }
    }
}
/**
 * Summary strip over the results grid: image count, total face tags,
 * and (when non-zero) the number of distinct people found.
 *
 * @param images current search results; callers only render this when non-empty
 */
@Composable
private fun StatsBar(images: List<ImageWithFaceTags>) {
    val totalFaces = images.sumOf { it.faceTags.size }
    // Distinct people across all result images, deduplicated by person id.
    val uniquePersons = images.flatMap { it.persons }.distinctBy { it.id }.size
    Surface(
        modifier = Modifier
            .fillMaxWidth()
            .padding(12.dp),
        color = MaterialTheme.colorScheme.secondaryContainer.copy(alpha = 0.5f),
        shape = RoundedCornerShape(16.dp),
        shadowElevation = 2.dp
    ) {
        Row(
            modifier = Modifier.padding(16.dp),
            horizontalArrangement = Arrangement.SpaceEvenly,
            verticalAlignment = Alignment.CenterVertically
        ) {
            StatBadge(
                icon = Icons.Default.Photo,
                label = "Images",
                value = images.size.toString()
            )
            VerticalDivider(
                modifier = Modifier.height(40.dp),
                color = MaterialTheme.colorScheme.outline.copy(alpha = 0.3f)
            )
            StatBadge(
                icon = Icons.Default.Face,
                label = "Faces",
                value = totalFaces.toString()
            )
            // People badge (and its divider) only when at least one person matched.
            if (uniquePersons > 0) {
                VerticalDivider(
                    modifier = Modifier.height(40.dp),
                    color = MaterialTheme.colorScheme.outline.copy(alpha = 0.3f)
                )
                StatBadge(
                    icon = Icons.Default.People,
                    label = "People",
                    value = uniquePersons.toString()
                )
            }
        }
    }
}
/**
 * Compact icon / value / label badge used inside [StatsBar].
 * Icon and value take the primary color; the label is muted.
 */
@Composable
private fun StatBadge(
    icon: androidx.compose.ui.graphics.vector.ImageVector,
    label: String,
    value: String
) {
    val accent = MaterialTheme.colorScheme.primary
    Column(
        verticalArrangement = Arrangement.spacedBy(4.dp),
        horizontalAlignment = Alignment.CenterHorizontally
    ) {
        Icon(
            icon,
            contentDescription = null,
            tint = accent,
            modifier = Modifier.size(24.dp)
        )
        Text(
            text = value,
            color = accent,
            fontWeight = FontWeight.Bold,
            style = MaterialTheme.typography.titleLarge
        )
        Text(
            text = label,
            color = MaterialTheme.colorScheme.onSurfaceVariant,
            style = MaterialTheme.typography.bodySmall
        )
    }
}
/**
 * Placeholder shown before the user has typed a query:
 * a faded search icon with a short prompt, centered on screen.
 */
@Composable
private fun EmptySearchState() {
    Box(
        contentAlignment = Alignment.Center,
        modifier = Modifier.fillMaxSize()
    ) {
        val fadedPrimary = MaterialTheme.colorScheme.primary.copy(alpha = 0.3f)
        Column(
            modifier = Modifier.padding(32.dp),
            verticalArrangement = Arrangement.spacedBy(16.dp),
            horizontalAlignment = Alignment.CenterHorizontally
        ) {
            Icon(
                Icons.Default.Search,
                contentDescription = null,
                tint = fadedPrimary,
                modifier = Modifier.size(80.dp)
            )
            Text(
                text = "Search your photos",
                fontWeight = FontWeight.Bold,
                style = MaterialTheme.typography.titleLarge
            )
            Text(
                text = "Enter a tag to find photos",
                color = MaterialTheme.colorScheme.onSurfaceVariant,
                style = MaterialTheme.typography.bodyMedium
            )
        }
    }
}
/**
 * Shown when a search query matched no photos.
 */
@Composable
private fun NoResultsState(query: String) {
    Box(
        modifier = Modifier.fillMaxSize(),
        contentAlignment = Alignment.Center
    ) {
        Column(
            modifier = Modifier.padding(32.dp),
            horizontalAlignment = Alignment.CenterHorizontally,
            verticalArrangement = Arrangement.spacedBy(16.dp)
        ) {
            Icon(
                Icons.Default.SearchOff,
                contentDescription = null,
                tint = MaterialTheme.colorScheme.error.copy(alpha = 0.5f),
                modifier = Modifier.size(80.dp)
            )
            Text(
                text = "No results",
                fontWeight = FontWeight.Bold,
                style = MaterialTheme.typography.titleLarge
            )
            Text(
                text = "No photos found for \"$query\"",
                color = MaterialTheme.colorScheme.onSurfaceVariant,
                style = MaterialTheme.typography.bodyMedium
            )
        }
    }
}
/**
 * Beautiful card showing image with face tags.
 *
 * Renders the image thumbnail, then up to three recognized people (each with
 * a per-face confidence badge) and a "+N more" line when more than three
 * people were matched.
 *
 * @param imageWithFaceTags image plus its parallel faceTags/persons lists
 * @param onImageClick invoked with the image id when the thumbnail is tapped
 */
@Composable
private fun ImageWithFaceTagsCard(
    imageWithFaceTags: ImageWithFaceTags,
    onImageClick: (String) -> Unit
) {
    Card(
        modifier = Modifier.fillMaxWidth(),
        shape = RoundedCornerShape(16.dp),
        elevation = CardDefaults.cardElevation(defaultElevation = 4.dp)
    ) {
        Column(
            modifier = Modifier.fillMaxWidth()
        ) {
            // Image thumbnail; tapping it reports the image id to the caller.
            ImageGridItem(
                image = imageWithFaceTags.image,
                onClick = { onImageClick(imageWithFaceTags.image.imageId) }
            )
            // Face tags footer, only when at least one person was recognized.
            if (imageWithFaceTags.persons.isNotEmpty()) {
                Surface(
                    color = MaterialTheme.colorScheme.primaryContainer.copy(alpha = 0.5f),
                    modifier = Modifier.fillMaxWidth()
                ) {
                    Column(
                        modifier = Modifier.padding(8.dp),
                        verticalArrangement = Arrangement.spacedBy(4.dp)
                    ) {
                        // persons and faceTags are built as parallel lists (see
                        // SearchViewModel), so faceTags[index] corresponds to
                        // persons[index].
                        imageWithFaceTags.persons.take(3).forEachIndexed { index, person ->
                            Row(
                                horizontalArrangement = Arrangement.spacedBy(6.dp),
                                verticalAlignment = Alignment.CenterVertically
                            ) {
                                Icon(
                                    Icons.Default.Face,
                                    contentDescription = null,
                                    modifier = Modifier.size(16.dp),
                                    tint = MaterialTheme.colorScheme.primary
                                )
                                Text(
                                    text = person.name,
                                    style = MaterialTheme.typography.bodySmall,
                                    fontWeight = FontWeight.Medium,
                                    maxLines = 1,
                                    overflow = TextOverflow.Ellipsis,
                                    modifier = Modifier.weight(1f)
                                )
                                // Bounds check guards against faceTags being
                                // shorter than persons.
                                if (index < imageWithFaceTags.faceTags.size) {
                                    val confidence = (imageWithFaceTags.faceTags[index].confidence * 100).toInt()
                                    Surface(
                                        shape = RoundedCornerShape(8.dp),
                                        // Higher-emphasis chip color for matches at 80% or above.
                                        color = if (confidence >= 80) {
                                            MaterialTheme.colorScheme.primary.copy(alpha = 0.2f)
                                        } else {
                                            MaterialTheme.colorScheme.tertiary.copy(alpha = 0.2f)
                                        }
                                    ) {
                                        Text(
                                            text = "$confidence%",
                                            style = MaterialTheme.typography.labelSmall,
                                            modifier = Modifier.padding(horizontal = 6.dp, vertical = 2.dp),
                                            fontWeight = FontWeight.Bold
                                        )
                                    }
                                }
                            }
                        }
                        // Overflow indicator for images with more than three matches.
                        if (imageWithFaceTags.persons.size > 3) {
                            Text(
                                text = "+${imageWithFaceTags.persons.size - 3} more",
                                style = MaterialTheme.typography.labelSmall,
                                color = MaterialTheme.colorScheme.primary,
                                fontWeight = FontWeight.Medium
                            )
                        }
                    }
                }
            }
        }
    }
}

View File

@@ -0,0 +1,70 @@
package com.placeholder.sherpai2.ui.search
import androidx.lifecycle.ViewModel
import com.placeholder.sherpai2.data.local.entity.ImageEntity
import com.placeholder.sherpai2.data.local.entity.PersonEntity
import com.placeholder.sherpai2.data.local.entity.PhotoFaceTagEntity
import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository
import com.placeholder.sherpai2.domain.repository.ImageRepository
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.map
import javax.inject.Inject
/**
 * ViewModel backing the search screen.
 *
 * Streams images matching a tag (or the full library when the query is blank)
 * and enriches each result with its face tags and the matching person records.
 */
@HiltViewModel
class SearchViewModel @Inject constructor(
    private val imageRepository: ImageRepository,
    private val faceRecognitionRepository: FaceRecognitionRepository
) : ViewModel() {
    /**
     * Search images by tag, attaching face-recognition data to every result.
     *
     * A blank [tag] returns all images. A fresh list is emitted whenever the
     * underlying repository flow emits.
     */
    fun searchImagesByTag(tag: String): Flow<List<ImageWithFaceTags>> {
        val source = when {
            tag.isBlank() -> imageRepository.getAllImages()
            else -> imageRepository.findImagesByTag(tag)
        }
        return source.map { results ->
            results.map { entry ->
                // Pair each face tag with the person it was matched to, then
                // split the pairs into the two parallel lists the UI expects.
                val pairs = faceRecognitionRepository.getFaceTagsWithPersons(
                    entry.image.imageId
                )
                ImageWithFaceTags(
                    image = entry.image,
                    faceTags = pairs.map { pair -> pair.first },
                    persons = pairs.map { pair -> pair.second }
                )
            }
        }
    }
}
/**
 * Search result bundling an image with its face recognition data.
 *
 * @property image The image entity
 * @property faceTags Face tags detected in this image
 * @property persons Person entities (parallel to faceTags: persons[i] is the
 *   person matched by faceTags[i])
 */
data class ImageWithFaceTags(
    val image: ImageEntity,
    val faceTags: List<PhotoFaceTagEntity>,
    val persons: List<PersonEntity>
)

View File

@@ -0,0 +1,30 @@
package com.placeholder.sherpai2.ui.search.components
import androidx.compose.foundation.Image
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.aspectRatio
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import coil.compose.rememberAsyncImagePainter
import com.placeholder.sherpai2.data.local.entity.ImageEntity
/**
 * ImageGridItem
 *
 * Square thumbnail preview of an image, loaded asynchronously from its URI.
 *
 * @param image entity whose imageUri is rendered
 * @param modifier caller-supplied modifier, applied to the thumbnail
 * @param onClick optional tap handler; when null the thumbnail is not clickable
 */
@Composable
fun ImageGridItem(
    image: ImageEntity,
    modifier: Modifier = Modifier,
    onClick: (() -> Unit)? = null
) {
    // Fix: the original ignored both the `modifier` and `onClick` parameters,
    // so callers (e.g. ImageWithFaceTagsCard) got a non-clickable thumbnail.
    val clickModifier = if (onClick != null) Modifier.clickable(onClick = onClick) else Modifier
    Image(
        painter = rememberAsyncImagePainter(image.imageUri),
        contentDescription = null,
        modifier = modifier
            .fillMaxWidth()
            .aspectRatio(1f)
            .then(clickModifier)
    )
}

View File

@@ -0,0 +1,77 @@
// TourScreen.kt
package com.placeholder.sherpai2.ui.tour
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.items
import androidx.compose.foundation.rememberScrollState
import androidx.compose.foundation.verticalScroll
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Modifier
import androidx.compose.ui.unit.dp
import androidx.hilt.navigation.compose.hiltViewModel
import com.placeholder.sherpai2.data.local.model.ImageWithEverything
/** Scrolling gallery of the most recently ingested images. */
@Composable
fun TourScreen(tourViewModel: TourViewModel = hiltViewModel(), onImageClick: (String) -> Unit) {
    val images by tourViewModel.recentImages.collectAsState()
    Column(modifier = Modifier.fillMaxSize()) {
        // Header line showing how many images are loaded.
        Text(
            text = "Gallery (${images.size} images)",
            style = MaterialTheme.typography.titleLarge,
            modifier = Modifier.padding(16.dp)
        )
        LazyColumn(
            contentPadding = PaddingValues(16.dp),
            modifier = Modifier.fillMaxSize()
        ) {
            items(images) { entry ->
                ImageCard(entry)
                Spacer(modifier = Modifier.height(12.dp))
            }
        }
    }
}
// Card for one gallery entry: a dump of its tag entities plus a chip row.
@Composable
fun ImageCard(image: ImageWithEverything) {
    Card(modifier = Modifier.fillMaxWidth(), elevation = CardDefaults.cardElevation(4.dp)) {
        Column(modifier = Modifier.padding(12.dp)) {
            // NOTE(review): this renders the raw List.toString() of the tag
            // entities — looks like leftover debug output; confirm whether a
            // formatted summary is intended here.
            Text(text = image.tags.toString(), style = MaterialTheme.typography.bodyMedium)
            // Tags row with placeholders if fewer than 3
            Row(modifier = Modifier.padding(top = 8.dp)) {
                // NOTE(review): tagId may be an identifier rather than a
                // display name — verify against the tag entity's fields.
                val tags = image.tags.map { it.tagId } // adjust depending on your entity
                tags.forEach { tag ->
                    TagComposable(tag)
                }
                // Pad with empty chips so the row always shows at least 3 slots.
                repeat(3 - tags.size.coerceAtMost(3)) {
                    TagComposable("") // empty placeholder
                }
            }
        }
    }
}
/** Small pill-shaped chip for a single tag; blank input renders an empty placeholder chip. */
@Composable
fun TagComposable(tag: String) {
    val chipText = if (tag.isNotBlank()) tag else " "
    Box(
        contentAlignment = androidx.compose.ui.Alignment.Center,
        modifier = Modifier
            .padding(end = 4.dp)
            .height(24.dp)
            .widthIn(min = 40.dp)
            .background(MaterialTheme.colorScheme.primaryContainer, MaterialTheme.shapes.small)
    ) {
        Text(
            text = chipText,
            modifier = Modifier.padding(horizontal = 6.dp),
            style = MaterialTheme.typography.labelSmall
        )
    }
}

View File

@@ -0,0 +1,39 @@
// TourViewModel.kt
package com.placeholder.sherpai2.ui.tour
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.placeholder.sherpai2.domain.repository.ImageRepository
import com.placeholder.sherpai2.data.local.model.ImageWithEverything
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.launch
import javax.inject.Inject
/**
 * Holds the list of recently ingested images for the tour/gallery screen.
 * Loaded once on creation; a failed fetch degrades to an empty list.
 */
@HiltViewModel
class TourViewModel @Inject constructor(
    private val imageRepository: ImageRepository
) : ViewModel() {
    // Backing state for the gallery; exposed read-only as a StateFlow.
    private val _recentImages = MutableStateFlow<List<ImageWithEverything>>(emptyList())
    val recentImages: StateFlow<List<ImageWithEverything>> = _recentImages.asStateFlow()

    init {
        loadRecentImages()
    }

    /** Collects up to [limit] recent images into [_recentImages]. */
    private fun loadRecentImages(limit: Int = 100) {
        imageRepository.getRecentImages(limit)
            .catch { e ->
                println("TourViewModel: error fetching images: $e")
                _recentImages.value = emptyList()
            }
            .onEach { fetched ->
                println("TourViewModel: fetched ${fetched.size} images")
                _recentImages.value = fetched
            }
            .launchIn(viewModelScope)
    }
}

View File

@@ -0,0 +1,159 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.net.Uri
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
import java.io.InputStream
/**
 * Helper class for detecting duplicate or near-duplicate images using
 * perceptual hashing (64-bit difference hash, "dHash").
 */
class DuplicateImageDetector(private val context: Context) {

    /**
     * Outcome of a duplicate scan.
     *
     * @property hasDuplicates true when at least one group of near-duplicates was found
     * @property duplicateGroups each group holds two or more visually similar images
     * @property uniqueImageCount images remaining after counting each group once
     */
    data class DuplicateCheckResult(
        val hasDuplicates: Boolean,
        val duplicateGroups: List<DuplicateGroup>,
        val uniqueImageCount: Int
    )

    /**
     * A set of images judged near-duplicates of one another.
     *
     * @property similarity lowest pairwise similarity (0..1) observed between
     *   the group's anchor image and its members
     */
    data class DuplicateGroup(
        val images: List<Uri>,
        val similarity: Double
    )

    // URI paired with its 64-bit perceptual hash.
    private data class ImageHash(
        val uri: Uri,
        val hash: Long
    )

    /**
     * Check for duplicate images in the provided list.
     *
     * Images that fail to load are silently skipped. Runs on [Dispatchers.Default].
     *
     * @param uris candidate images
     * @param similarityThreshold minimum dHash similarity (0..1) for two images
     *   to be grouped as duplicates
     */
    suspend fun checkForDuplicates(
        uris: List<Uri>,
        similarityThreshold: Double = 0.95
    ): DuplicateCheckResult = withContext(Dispatchers.Default) {
        if (uris.size < 2) {
            return@withContext DuplicateCheckResult(
                hasDuplicates = false,
                duplicateGroups = emptyList(),
                uniqueImageCount = uris.size
            )
        }
        // Compute a perceptual hash for each image; unreadable images are dropped.
        val imageHashes = uris.mapNotNull { uri ->
            try {
                loadBitmap(uri)?.let { bitmap ->
                    ImageHash(uri, computePerceptualHash(bitmap))
                }
            } catch (e: Exception) {
                null
            }
        }
        // Greedy anchor-based grouping: each unprocessed image collects all
        // later images whose hash similarity meets the threshold.
        val duplicateGroups = mutableListOf<DuplicateGroup>()
        val processed = mutableSetOf<Uri>()
        for (i in imageHashes.indices) {
            if (imageHashes[i].uri in processed) continue
            val currentGroup = mutableListOf(imageHashes[i].uri)
            var groupSimilarity = 1.0
            for (j in i + 1 until imageHashes.size) {
                if (imageHashes[j].uri in processed) continue
                val similarity = calculateSimilarity(imageHashes[i].hash, imageHashes[j].hash)
                if (similarity >= similarityThreshold) {
                    currentGroup.add(imageHashes[j].uri)
                    processed.add(imageHashes[j].uri)
                    // Fix: report the real (weakest) match in the group instead
                    // of a hard-coded 1.0.
                    groupSimilarity = minOf(groupSimilarity, similarity)
                }
            }
            if (currentGroup.size > 1) {
                duplicateGroups.add(
                    DuplicateGroup(
                        images = currentGroup,
                        similarity = groupSimilarity
                    )
                )
                processed.addAll(currentGroup)
            }
        }
        DuplicateCheckResult(
            hasDuplicates = duplicateGroups.isNotEmpty(),
            duplicateGroups = duplicateGroups,
            // Every member of a group beyond the first counts as a duplicate.
            uniqueImageCount = uris.size - duplicateGroups.sumOf { it.images.size - 1 }
        )
    }

    /**
     * Compute a 64-bit difference hash (dHash): scale to 9x8 pixels, then set
     * one bit per pixel according to whether it is brighter than its right
     * neighbour.
     */
    private fun computePerceptualHash(bitmap: Bitmap): Long {
        // 9 columns so each of the 8 hashed columns has a right-hand neighbour.
        val resized = Bitmap.createScaledBitmap(bitmap, 9, 8, false)
        var hash = 0L
        var bitIndex = 0
        for (y in 0 until 8) {
            for (x in 0 until 8) {
                val leftGray = toGrayscale(resized.getPixel(x, y))
                val rightGray = toGrayscale(resized.getPixel(x + 1, y))
                if (leftGray > rightGray) {
                    hash = hash or (1L shl bitIndex)
                }
                bitIndex++
            }
        }
        // Fix: createScaledBitmap may return the source bitmap unchanged when
        // the size already matches; only recycle a newly allocated bitmap.
        if (resized !== bitmap) {
            resized.recycle()
        }
        return hash
    }

    /** Convert an ARGB pixel to a luminance value using Rec. 601 weights. */
    private fun toGrayscale(pixel: Int): Int {
        val r = (pixel shr 16) and 0xFF
        val g = (pixel shr 8) and 0xFF
        val b = pixel and 0xFF
        return (0.299 * r + 0.587 * g + 0.114 * b).toInt()
    }

    /** Similarity (0..1) between two hashes: 1 minus normalized Hamming distance. */
    private fun calculateSimilarity(hash1: Long, hash2: Long): Double {
        val hammingDistance = (hash1 xor hash2).countOneBits()
        return 1.0 - (hammingDistance / 64.0)
    }

    /** Load a bitmap from a content URI; returns null on any failure. */
    private fun loadBitmap(uri: Uri): Bitmap? {
        return try {
            // Fix: use {} closes the stream even when decoding fails or throws
            // (the original leaked the stream when decodeStream returned null).
            context.contentResolver.openInputStream(uri)?.use { stream ->
                BitmapFactory.decodeStream(stream)
            }
        } catch (e: Exception) {
            null
        }
    }
}

View File

@@ -0,0 +1,435 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Rect
import android.net.Uri
import androidx.compose.foundation.BorderStroke
import androidx.compose.foundation.Canvas
import androidx.compose.foundation.background
import androidx.compose.foundation.border
import androidx.compose.foundation.clickable
import androidx.compose.foundation.gestures.detectTapGestures
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.CheckCircle
import androidx.compose.material.icons.filled.Close
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.geometry.Offset
import androidx.compose.ui.geometry.Size
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.asImageBitmap
import androidx.compose.ui.graphics.drawscope.Stroke
import androidx.compose.ui.input.pointer.pointerInput
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.dp
import androidx.compose.ui.window.Dialog
import androidx.compose.ui.window.DialogProperties
import coil.compose.AsyncImage
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
/**
 * Dialog for selecting a face from multiple detected faces.
 *
 * Crops every detected face out of the source image off the main thread,
 * shows the original image with selectable bounding boxes plus a row of
 * cropped previews, and confirms the choice via [onFaceSelected].
 *
 * @param result detection output: source URI plus face bounding boxes
 * @param onDismiss called when the dialog is cancelled or closed
 * @param onFaceSelected called with (faceIndex, croppedFaceBitmap) on confirm
 */
@Composable
fun FacePickerDialog(
    result: FaceDetectionHelper.FaceDetectionResult,
    onDismiss: () -> Unit,
    onFaceSelected: (Int, Bitmap) -> Unit // faceIndex, croppedFaceBitmap
) {
    val context = LocalContext.current
    // Index into result.faceBounds of the currently chosen face, if any.
    var selectedFaceIndex by remember { mutableStateOf<Int?>(null) }
    // Cropped face bitmaps, parallel to result.faceBounds.
    var croppedFaces by remember { mutableStateOf<List<Bitmap>>(emptyList()) }
    var isLoading by remember { mutableStateOf(true) }
    // Load and crop all faces off the main thread whenever `result` changes.
    LaunchedEffect(result) {
        isLoading = true
        croppedFaces = withContext(Dispatchers.IO) {
            val bitmap = loadBitmapFromUri(context, result.uri)
            bitmap?.let { bmp ->
                result.faceBounds.map { bounds ->
                    cropFaceFromBitmap(bmp, bounds)
                }
            } ?: emptyList()
        }
        isLoading = false
        // Auto-select the first face.
        // NOTE(review): the original comment calls this the "largest" face,
        // which assumes the detector returns faces ordered by size — confirm.
        if (croppedFaces.isNotEmpty()) {
            selectedFaceIndex = 0
        }
    }
    Dialog(
        onDismissRequest = onDismiss,
        properties = DialogProperties(usePlatformDefaultWidth = false)
    ) {
        Card(
            modifier = Modifier
                .fillMaxWidth(0.95f)
                .fillMaxHeight(0.9f),
            shape = RoundedCornerShape(16.dp)
        ) {
            Column(
                modifier = Modifier
                    .fillMaxSize()
                    .padding(20.dp),
                verticalArrangement = Arrangement.spacedBy(16.dp)
            ) {
                // Header: title, detected-face count, and a close button.
                Row(
                    modifier = Modifier.fillMaxWidth(),
                    horizontalArrangement = Arrangement.SpaceBetween,
                    verticalAlignment = Alignment.CenterVertically
                ) {
                    Column {
                        Text(
                            text = "Pick a Face",
                            style = MaterialTheme.typography.headlineSmall,
                            fontWeight = FontWeight.Bold
                        )
                        Text(
                            text = "${result.faceCount} faces detected",
                            style = MaterialTheme.typography.bodyMedium,
                            color = MaterialTheme.colorScheme.onSurfaceVariant
                        )
                    }
                    IconButton(onClick = onDismiss) {
                        Icon(Icons.Default.Close, "Close")
                    }
                }
                // Instruction
                Text(
                    text = "Tap a face below to select it for training:",
                    style = MaterialTheme.typography.bodyMedium
                )
                if (isLoading) {
                    // Loading state while faces are decoded and cropped.
                    Box(
                        modifier = Modifier
                            .fillMaxWidth()
                            .weight(1f),
                        contentAlignment = Alignment.Center
                    ) {
                        CircularProgressIndicator()
                    }
                } else {
                    // Original image with face boxes overlay
                    Card(
                        modifier = Modifier
                            .fillMaxWidth()
                            .weight(1f),
                        colors = CardDefaults.cardColors(
                            containerColor = MaterialTheme.colorScheme.surfaceVariant
                        )
                    ) {
                        Box(
                            modifier = Modifier.fillMaxSize(),
                            contentAlignment = Alignment.Center
                        ) {
                            FaceOverlayImage(
                                imageUri = result.uri,
                                faceBounds = result.faceBounds,
                                selectedFaceIndex = selectedFaceIndex,
                                onFaceClick = { index ->
                                    selectedFaceIndex = index
                                }
                            )
                        }
                    }
                    // Row of cropped face previews; tapping one selects it.
                    Text(
                        text = "Preview (tap to select):",
                        style = MaterialTheme.typography.bodyMedium,
                        fontWeight = FontWeight.SemiBold
                    )
                    Row(
                        modifier = Modifier.fillMaxWidth(),
                        horizontalArrangement = Arrangement.spacedBy(12.dp)
                    ) {
                        croppedFaces.forEachIndexed { index, faceBitmap ->
                            FacePreviewCard(
                                faceBitmap = faceBitmap,
                                index = index,
                                isSelected = selectedFaceIndex == index,
                                onClick = { selectedFaceIndex = index },
                                modifier = Modifier.weight(1f)
                            )
                        }
                    }
                }
                // Action buttons: cancel, or confirm the selected face.
                Row(
                    modifier = Modifier.fillMaxWidth(),
                    horizontalArrangement = Arrangement.spacedBy(12.dp)
                ) {
                    OutlinedButton(
                        onClick = onDismiss,
                        modifier = Modifier.weight(1f)
                    ) {
                        Text("Cancel")
                    }
                    Button(
                        onClick = {
                            // Only confirm with an index that maps to a crop.
                            selectedFaceIndex?.let { index ->
                                if (index < croppedFaces.size) {
                                    onFaceSelected(index, croppedFaces[index])
                                }
                            }
                        },
                        modifier = Modifier.weight(1f),
                        enabled = selectedFaceIndex != null && !isLoading
                    ) {
                        Icon(Icons.Default.CheckCircle, contentDescription = null)
                        Spacer(modifier = Modifier.width(8.dp))
                        Text("Use This Face")
                    }
                }
            }
        }
    }
}
/**
 * Displays the original image with an interactive overlay: each detected face
 * is outlined, the selected face is highlighted, and tapping inside a face's
 * bounds selects it.
 *
 * Fix: the original stacked one full-size clickable Box per face, so every tap
 * hit the topmost box and reported the same index regardless of where the user
 * tapped; its `imageSize` state was also populated by a detached
 * BoxWithConstraints that measured the wrong constraints. Hit-testing is now
 * done by mapping the tap position back into bitmap coordinates.
 *
 * @param imageUri source image
 * @param faceBounds face rectangles in the ORIGINAL bitmap's pixel coordinates
 * @param selectedFaceIndex index into [faceBounds] of the selection, or null
 * @param onFaceClick invoked with the index of the face whose bounds were tapped
 */
@Composable
private fun FaceOverlayImage(
    imageUri: Uri,
    faceBounds: List<Rect>,
    selectedFaceIndex: Int?,
    onFaceClick: (Int) -> Unit
) {
    // Intrinsic size of the decoded image; needed to map bitmap-space face
    // rectangles into canvas space (ContentScale.Fit letterboxes the image).
    var imageBounds by remember { mutableStateOf(Rect()) }
    Box(
        modifier = Modifier.fillMaxSize()
    ) {
        // Original image
        AsyncImage(
            model = imageUri,
            contentDescription = "Original image",
            modifier = Modifier
                .fillMaxSize()
                .padding(8.dp),
            contentScale = ContentScale.Fit,
            onSuccess = { state ->
                val drawable = state.result.drawable
                imageBounds = Rect(0, 0, drawable.intrinsicWidth, drawable.intrinsicHeight)
            }
        )
        // Face boxes overlay; the same layer also performs tap hit-testing.
        Canvas(
            modifier = Modifier
                .fillMaxSize()
                .padding(8.dp)
                .pointerInput(faceBounds, imageBounds) {
                    detectTapGestures { tap ->
                        if (imageBounds.width() > 0 && imageBounds.height() > 0) {
                            // Same fit-and-center math as the drawing pass below.
                            val scale = minOf(
                                size.width / imageBounds.width().toFloat(),
                                size.height / imageBounds.height().toFloat()
                            )
                            val offsetX = (size.width - imageBounds.width() * scale) / 2f
                            val offsetY = (size.height - imageBounds.height() * scale) / 2f
                            // Back-project the tap into bitmap coordinates.
                            val bitmapX = ((tap.x - offsetX) / scale).toInt()
                            val bitmapY = ((tap.y - offsetY) / scale).toInt()
                            val hitIndex = faceBounds.indexOfFirst { it.contains(bitmapX, bitmapY) }
                            if (hitIndex >= 0) {
                                onFaceClick(hitIndex)
                            }
                        }
                    }
                }
        ) {
            if (imageBounds.width() > 0 && imageBounds.height() > 0) {
                // Scale to fit the image in the canvas (mirrors ContentScale.Fit).
                val scaleX = size.width / imageBounds.width()
                val scaleY = size.height / imageBounds.height()
                val scale = minOf(scaleX, scaleY)
                // Offset that centers the letterboxed image.
                val scaledWidth = imageBounds.width() * scale
                val scaledHeight = imageBounds.height() * scale
                val offsetX = (size.width - scaledWidth) / 2
                val offsetY = (size.height - scaledHeight) / 2
                faceBounds.forEachIndexed { index, bounds ->
                    val isSelected = selectedFaceIndex == index
                    // Scale and position the face box.
                    val left = bounds.left * scale + offsetX
                    val top = bounds.top * scale + offsetY
                    val width = bounds.width() * scale
                    val height = bounds.height() * scale
                    // Outline; the selected face gets a thicker green stroke.
                    drawRect(
                        color = if (isSelected) Color(0xFF4CAF50) else Color(0xFF2196F3),
                        topLeft = Offset(left, top),
                        size = Size(width, height),
                        style = Stroke(width = if (isSelected) 6f else 4f)
                    )
                    // Semi-transparent fill for the selected face.
                    if (isSelected) {
                        drawRect(
                            color = Color(0xFF4CAF50).copy(alpha = 0.2f),
                            topLeft = Offset(left, top),
                            size = Size(width, height)
                        )
                    }
                    // Corner marker for the face.
                    drawCircle(
                        color = if (isSelected) Color(0xFF4CAF50) else Color(0xFF2196F3),
                        radius = 20f * scale,
                        center = Offset(left + 20f * scale, top + 20f * scale)
                    )
                }
            }
        }
    }
}
/**
 * Single face thumbnail the user can tap to select. Shows a numbered badge
 * in the top-right corner and, when selected, a highlighted border plus a
 * centered checkmark.
 */
@Composable
private fun FacePreviewCard(
    faceBitmap: Bitmap,
    index: Int,
    isSelected: Boolean,
    onClick: () -> Unit,
    modifier: Modifier = Modifier
) {
    val scheme = MaterialTheme.colorScheme
    val containerColor = if (isSelected) scheme.primaryContainer else scheme.surface
    val cardBorder =
        if (isSelected) BorderStroke(3.dp, scheme.primary)
        else BorderStroke(1.dp, scheme.outline)
    Card(
        modifier = modifier
            .aspectRatio(1f)
            .clickable(onClick = onClick),
        colors = CardDefaults.cardColors(containerColor = containerColor),
        border = cardBorder
    ) {
        Box(modifier = Modifier.fillMaxSize()) {
            androidx.compose.foundation.Image(
                bitmap = faceBitmap.asImageBitmap(),
                contentDescription = "Face ${index + 1}",
                contentScale = ContentScale.Crop,
                modifier = Modifier.fillMaxSize()
            )
            // Centered checkmark, shown only while selected.
            if (isSelected) {
                Surface(
                    shape = CircleShape,
                    color = scheme.primary.copy(alpha = 0.9f),
                    modifier = Modifier.align(Alignment.Center)
                ) {
                    Icon(
                        Icons.Default.CheckCircle,
                        contentDescription = "Selected",
                        tint = scheme.onPrimary,
                        modifier = Modifier
                            .padding(12.dp)
                            .size(32.dp)
                    )
                }
            }
            // Small numbered badge pinned to the top-right corner.
            Surface(
                shape = CircleShape,
                color = if (isSelected) scheme.primary
                        else scheme.surfaceVariant.copy(alpha = 0.9f),
                shadowElevation = 2.dp,
                modifier = Modifier
                    .align(Alignment.TopEnd)
                    .padding(4.dp)
            ) {
                Text(
                    text = "${index + 1}",
                    style = MaterialTheme.typography.labelSmall,
                    fontWeight = FontWeight.Bold,
                    color = if (isSelected) scheme.onPrimary else scheme.onSurfaceVariant,
                    modifier = Modifier.padding(6.dp)
                )
            }
        }
    }
}
/**
 * Decode a bitmap from a content URI on the IO dispatcher.
 * Returns null if the URI cannot be opened or decoded.
 */
private suspend fun loadBitmapFromUri(
    context: android.content.Context,
    uri: Uri
): Bitmap? = withContext(Dispatchers.IO) {
    try {
        // Fix: use {} guarantees the stream is closed even when decoding fails
        // (the original leaked the stream when decodeStream returned null).
        context.contentResolver.openInputStream(uri)?.use { stream ->
            BitmapFactory.decodeStream(stream)
        }
    } catch (e: Exception) {
        null
    }
}
/**
 * Crop a face region out of [bitmap], expanded by 20% padding on each side and
 * clamped to the bitmap's edges so the crop is always a valid region.
 */
private fun cropFaceFromBitmap(bitmap: Bitmap, faceBounds: Rect): Bitmap {
    // Add 20% padding around the face.
    val padding = (faceBounds.width() * 0.2f).toInt()
    // Fix: clamp all four edges so the crop is at least 1px and never extends
    // outside the bitmap — degenerate detector boxes would otherwise make
    // Bitmap.createBitmap throw on a non-positive or out-of-range size.
    val left = (faceBounds.left - padding).coerceIn(0, bitmap.width - 1)
    val top = (faceBounds.top - padding).coerceIn(0, bitmap.height - 1)
    val right = (faceBounds.right + padding).coerceIn(left + 1, bitmap.width)
    val bottom = (faceBounds.bottom + padding).coerceIn(top + 1, bitmap.height)
    return Bitmap.createBitmap(bitmap, left, top, right - left, bottom - top)
}

View File

@@ -0,0 +1,56 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.graphics.Rect
import android.net.Uri
import androidx.compose.foundation.Canvas
import androidx.compose.foundation.Image
import androidx.compose.foundation.clickable
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyRow
import androidx.compose.foundation.lazy.items
import androidx.compose.material3.Card
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.unit.dp
import coil.compose.rememberAsyncImagePainter
/**
 * Fallback picker shown when multiple faces are detected: the full image on
 * top, plus a horizontal list of numbered face cards to choose from.
 *
 * @param uri image being inspected
 * @param faceBoxes detected face rectangles in bitmap coordinates
 * @param onFaceSelected called with the chosen face's bounds
 */
@Composable
fun FacePickerScreen(
    uri: Uri,
    faceBoxes: List<Rect>,
    onFaceSelected: (Rect) -> Unit
) {
    Column(modifier = Modifier.fillMaxSize().padding(16.dp)) {
        Text("Multiple faces detected!", style = MaterialTheme.typography.headlineSmall)
        Text("Tap the person you want to train on.")
        Box(modifier = Modifier.weight(1f).fillMaxWidth().padding(vertical = 16.dp)) {
            // Main Image
            Image(
                painter = rememberAsyncImagePainter(uri),
                contentDescription = null,
                modifier = Modifier.fillMaxSize(),
                contentScale = ContentScale.Fit
            )
            // Overlay Clickable Boxes
            // Note: In a production app, you'd need to map Rect coordinates
            // from the Bitmap scale to the UI View scale.
            Canvas(modifier = Modifier.fillMaxSize().clickable { /* Handle general tap */ }) {
                // Implementation of coordinate mapping goes here
            }
            // Simplified: show the faces as a numbered list of tappable cards.
            // Fix: carry the index with each item instead of calling
            // faceBoxes.indexOf(box), which is O(n) per item and mislabels
            // duplicate rectangles (indexOf always returns the first match).
            LazyRow {
                items(faceBoxes.withIndex().toList()) { (position, box) ->
                    Card(modifier = Modifier.padding(8.dp).size(100.dp).clickable { onFaceSelected(box) }) {
                        Text("Face ${position + 1}", Modifier.align(Alignment.CenterHorizontally))
                    }
                }
            }
        }
    }
}

View File

@@ -0,0 +1,124 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.content.Context
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Rect
import android.net.Uri
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.face.FaceDetection
import com.google.mlkit.vision.face.FaceDetectorOptions
import kotlinx.coroutines.tasks.await
import java.io.InputStream
/**
 * Helper class for detecting faces in images using ML Kit Face Detection.
 *
 * Owns a single detector client; call [cleanup] to release it when finished.
 */
class FaceDetectionHelper(private val context: Context) {

    private val faceDetectorOptions = FaceDetectorOptions.Builder()
        .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
        .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
        .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
        .setMinFaceSize(0.15f) // Detect faces that are at least 15% of image
        .build()

    private val detector = FaceDetection.getClient(faceDetectorOptions)

    /**
     * Outcome of detection for one image.
     *
     * @property uri the analyzed image
     * @property hasFace true when at least one face was found
     * @property faceCount number of faces found
     * @property faceBounds bounding boxes in bitmap pixel coordinates
     * @property croppedFaceBitmap padded crop of the first detected face, if any
     * @property errorMessage set when loading or detection failed
     */
    data class FaceDetectionResult(
        val uri: Uri,
        val hasFace: Boolean,
        val faceCount: Int,
        val faceBounds: List<Rect> = emptyList(),
        val croppedFaceBitmap: Bitmap? = null,
        val errorMessage: String? = null
    )

    /**
     * Detect faces in a single image. Never throws: failures are reported via
     * [FaceDetectionResult.errorMessage].
     */
    suspend fun detectFacesInImage(uri: Uri): FaceDetectionResult {
        return try {
            val bitmap = loadBitmap(uri)
                ?: return FaceDetectionResult(
                    uri = uri,
                    hasFace = false,
                    faceCount = 0,
                    errorMessage = "Failed to load image"
                )
            // NOTE(review): rotation is hard-coded to 0 — EXIF-rotated photos
            // may need the real rotation passed here; confirm upstream
            // normalization before relying on the bounding boxes.
            val inputImage = InputImage.fromBitmap(bitmap, 0)
            val faces = detector.process(inputImage).await()
            val croppedFace = if (faces.isNotEmpty()) {
                // Crop the first detected face with some padding.
                cropFaceFromBitmap(bitmap, faces[0].boundingBox)
            } else {
                null
            }
            FaceDetectionResult(
                uri = uri,
                hasFace = faces.isNotEmpty(),
                faceCount = faces.size,
                faceBounds = faces.map { it.boundingBox },
                croppedFaceBitmap = croppedFace
            )
        } catch (e: Exception) {
            FaceDetectionResult(
                uri = uri,
                hasFace = false,
                faceCount = 0,
                errorMessage = e.message ?: "Unknown error"
            )
        }
    }

    /** Detect faces in multiple images, sequentially. */
    suspend fun detectFacesInImages(uris: List<Uri>): List<FaceDetectionResult> {
        return uris.map { uri ->
            detectFacesInImage(uri)
        }
    }

    /**
     * Crop a face out of [bitmap] with 20% padding, clamped so the crop is
     * always a valid, non-empty region of the bitmap.
     */
    private fun cropFaceFromBitmap(bitmap: Bitmap, faceBounds: Rect): Bitmap {
        val padding = (faceBounds.width() * 0.2f).toInt()
        // Fix: clamp all four edges so the crop is at least 1px and never
        // extends outside the bitmap — degenerate detector boxes would
        // otherwise make createBitmap throw on a non-positive size.
        val left = (faceBounds.left - padding).coerceIn(0, bitmap.width - 1)
        val top = (faceBounds.top - padding).coerceIn(0, bitmap.height - 1)
        val right = (faceBounds.right + padding).coerceIn(left + 1, bitmap.width)
        val bottom = (faceBounds.bottom + padding).coerceIn(top + 1, bitmap.height)
        return Bitmap.createBitmap(bitmap, left, top, right - left, bottom - top)
    }

    /** Load a bitmap from a content URI; returns null on any failure. */
    private fun loadBitmap(uri: Uri): Bitmap? {
        return try {
            // Fix: use {} closes the stream even when decoding fails
            // (the original leaked it when decodeStream returned null).
            context.contentResolver.openInputStream(uri)?.use { stream ->
                BitmapFactory.decodeStream(stream)
            }
        } catch (e: Exception) {
            null
        }
    }

    /** Release the underlying ML Kit detector. */
    fun cleanup() {
        detector.close()
    }
}

View File

@@ -0,0 +1,130 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.content.Intent
import android.net.Uri
import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.grid.GridCells
import androidx.compose.foundation.lazy.grid.LazyVerticalGrid
import androidx.compose.foundation.lazy.grid.items
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.AddPhotoAlternate
import androidx.compose.material.icons.filled.Close
import androidx.compose.material3.*
import androidx.compose.material3.Text
import androidx.compose.runtime.*
import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.unit.dp
import coil.compose.AsyncImage
/**
 * Lets the user pick up to 10 training photos from the document picker,
 * preview them in a grid, remove individual picks, and hand the final list to
 * [onImagesSelected].
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun ImageSelectorScreen(
    onImagesSelected: (List<Uri>) -> Unit
) {
    // 1. Persist selection state across configuration changes.
    var selectedUris by rememberSaveable { mutableStateOf<List<Uri>>(emptyList()) }
    val context = LocalContext.current
    val launcher = rememberLauncherForActivityResult(
        ActivityResultContracts.OpenMultipleDocuments()
    ) { uris ->
        // 2. Cap at 10 and persist read access so the URIs stay readable later.
        // Fix: the original comment promised this but never called
        // takePersistableUriPermission, leaving `context` unused and the URIs
        // revocable after the activity result's grant expires.
        val limitedUris = uris.take(10)
        limitedUris.forEach { uri ->
            try {
                context.contentResolver.takePersistableUriPermission(
                    uri,
                    Intent.FLAG_GRANT_READ_URI_PERMISSION
                )
            } catch (e: SecurityException) {
                // Some providers don't offer a persistable grant; the URI is
                // still usable for this session, so keep it anyway.
            }
        }
        selectedUris = limitedUris
    }
    Scaffold(
        topBar = { TopAppBar(title = { Text("Select Training Photos") }) }
    ) { padding ->
        Column(
            modifier = Modifier
                .padding(padding)
                .padding(16.dp)
                .fillMaxSize(),
            verticalArrangement = Arrangement.spacedBy(16.dp)
        ) {
            // Picker entry point plus a selection counter.
            OutlinedCard(
                onClick = { launcher.launch(arrayOf("image/*")) },
                modifier = Modifier.fillMaxWidth()
            ) {
                Column(
                    modifier = Modifier.padding(24.dp),
                    horizontalAlignment = Alignment.CenterHorizontally
                ) {
                    Icon(Icons.Default.AddPhotoAlternate, contentDescription = null)
                    Spacer(Modifier.height(8.dp))
                    Text("Select up to 10 images of the person")
                    Text(
                        text = "${selectedUris.size} / 10 selected",
                        style = MaterialTheme.typography.labelLarge,
                        color = if (selectedUris.size == 10) MaterialTheme.colorScheme.error
                        else if (selectedUris.isNotEmpty()) MaterialTheme.colorScheme.primary
                        else MaterialTheme.colorScheme.outline
                    )
                }
            }
            // 3. Conditional rendering for the empty state.
            if (selectedUris.isEmpty()) {
                Box(Modifier
                    .weight(1f)
                    .fillMaxWidth(), contentAlignment = Alignment.Center) {
                    Text("No images selected", style = MaterialTheme.typography.bodyMedium)
                }
            } else {
                LazyVerticalGrid(
                    columns = GridCells.Fixed(3),
                    modifier = Modifier.weight(1f),
                    contentPadding = PaddingValues(4.dp)
                ) {
                    items(selectedUris, key = { it.toString() }) { uri ->
                        Box(modifier = Modifier.padding(4.dp)) {
                            AsyncImage(
                                model = uri,
                                contentDescription = null,
                                modifier = Modifier
                                    .aspectRatio(1f)
                                    .clip(RoundedCornerShape(8.dp)),
                                contentScale = ContentScale.Crop
                            )
                            // 4. Per-image remove button in the top-right corner.
                            Surface(
                                onClick = { selectedUris = selectedUris - uri },
                                modifier = Modifier
                                    .align(Alignment.TopEnd)
                                    .padding(4.dp),
                                shape = CircleShape,
                                color = MaterialTheme.colorScheme.surfaceVariant.copy(alpha = 0.8f)
                            ) {
                                Icon(
                                    Icons.Default.Close,
                                    contentDescription = "Remove",
                                    modifier = Modifier.size(16.dp)
                                )
                            }
                        }
                    }
                }
            }
            Button(
                modifier = Modifier.fillMaxWidth(),
                enabled = selectedUris.isNotEmpty(),
                onClick = { onImagesSelected(selectedUris) }
            ) {
                Text("Start Face Detection")
            }
        }
    }
}

View File

@@ -0,0 +1,879 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.net.Uri
import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.result.PickVisualMediaRequest
import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.foundation.BorderStroke
import androidx.compose.foundation.Image
import androidx.compose.foundation.background
import androidx.compose.foundation.border
import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.LazyColumn
import androidx.compose.foundation.lazy.itemsIndexed
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.foundation.text.KeyboardActions
import androidx.compose.foundation.text.KeyboardOptions
import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier
import androidx.compose.ui.draw.clip
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.asImageBitmap
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.input.ImeAction
import androidx.compose.ui.text.input.KeyboardCapitalization
import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.unit.dp
import androidx.hilt.navigation.compose.hiltViewModel
import coil.compose.AsyncImage
/**
 * Screen shown after training images were selected.
 *
 * Renders the current [ScanningState] (processing / results / error) and
 * overlays training progress while a face model is being created. Also hosts
 * the face-picker and name-input dialogs.
 *
 * Fix: removed an unused local (`val success = trainingState as TrainingState.Success`)
 * in the Success branch of the LaunchedEffect.
 *
 * @param state current image-scanning/validation state
 * @param onFinish invoked when the flow should exit (after success or retry)
 * @param trainViewModel view model driving validation and model creation
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun ScanResultsScreen(
    state: ScanningState,
    onFinish: () -> Unit,
    trainViewModel: TrainViewModel = hiltViewModel()
) {
    // Dialog state: a non-null detection result opens the face picker for that image.
    var showFacePickerDialog by remember { mutableStateOf<FaceDetectionHelper.FaceDetectionResult?>(null) }
    var showNameInputDialog by remember { mutableStateOf(false) }
    // Observe training state
    val trainingState by trainViewModel.trainingState.collectAsState()
    // Handle training state changes
    LaunchedEffect(trainingState) {
        when (trainingState) {
            is TrainingState.Success -> {
                // Training completed successfully: reset state and leave the screen.
                trainViewModel.resetTrainingState()
                onFinish()
            }
            is TrainingState.Error -> {
                // Error will be shown in dialog, no action needed here
            }
            else -> { /* Idle or Processing */ }
        }
    }
    Scaffold(
        topBar = {
            TopAppBar(
                title = { Text("Training Image Analysis") },
                colors = TopAppBarDefaults.topAppBarColors(
                    containerColor = MaterialTheme.colorScheme.primaryContainer
                )
            )
        }
    ) { paddingValues ->
        Box(
            modifier = Modifier
                .fillMaxSize()
                .padding(paddingValues)
        ) {
            when (state) {
                is ScanningState.Idle -> {
                    // Should not happen
                }
                is ScanningState.Processing -> {
                    ProcessingView(
                        progress = state.progress,
                        total = state.total
                    )
                }
                is ScanningState.Success -> {
                    ImprovedResultsView(
                        result = state.sanityCheckResult,
                        onContinue = {
                            // Show name input dialog instead of immediately finishing
                            showNameInputDialog = true
                        },
                        onRetry = onFinish,
                        onReplaceImage = { oldUri, newUri ->
                            trainViewModel.replaceImage(oldUri, newUri)
                        },
                        onSelectFaceFromMultiple = { result ->
                            showFacePickerDialog = result
                        }
                    )
                }
                is ScanningState.Error -> {
                    ErrorView(
                        message = state.message,
                        onRetry = onFinish
                    )
                }
            }
            // Show training overlay if processing
            if (trainingState is TrainingState.Processing) {
                TrainingOverlay(trainingState = trainingState as TrainingState.Processing)
            }
        }
    }
    // Face Picker Dialog
    showFacePickerDialog?.let { result ->
        FacePickerDialog(
            result = result,
            onDismiss = { showFacePickerDialog = null },
            onFaceSelected = { faceIndex, croppedFaceBitmap ->
                trainViewModel.selectFaceFromImage(result.uri, faceIndex, croppedFaceBitmap)
                showFacePickerDialog = null
            }
        )
    }
    // Name Input Dialog
    if (showNameInputDialog) {
        NameInputDialog(
            onDismiss = { showNameInputDialog = false },
            onConfirm = { name ->
                showNameInputDialog = false
                trainViewModel.createFaceModel(name)
            },
            trainingState = trainingState
        )
    }
}
/**
 * Dialog for entering person's name before training.
 *
 * Doubles as the error surface: when [trainingState] is [TrainingState.Error]
 * the dialog shows the error message and the confirm button reads "Try Again".
 * Dismissal and cancel are blocked while training is in flight.
 *
 * @param onDismiss called when the user cancels (blocked while Processing)
 * @param onConfirm called with the trimmed name to start training
 * @param trainingState current training state; drives enabled/error UI
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
private fun NameInputDialog(
    onDismiss: () -> Unit,
    onConfirm: (String) -> Unit,
    trainingState: TrainingState
) {
    var personName by remember { mutableStateOf("") }
    val isError = trainingState is TrainingState.Error
    AlertDialog(
        onDismissRequest = {
            // Prevent dismissal while a training run is in flight.
            if (trainingState !is TrainingState.Processing) {
                onDismiss()
            }
        },
        title = {
            Text(
                text = if (isError) "Training Error" else "Who is this?",
                style = MaterialTheme.typography.headlineSmall
            )
        },
        text = {
            Column(
                verticalArrangement = Arrangement.spacedBy(16.dp)
            ) {
                if (isError) {
                    // Show error message
                    val error = trainingState as TrainingState.Error
                    Surface(
                        color = MaterialTheme.colorScheme.errorContainer,
                        shape = RoundedCornerShape(8.dp)
                    ) {
                        Row(
                            modifier = Modifier.padding(12.dp),
                            horizontalArrangement = Arrangement.spacedBy(8.dp),
                            verticalAlignment = Alignment.CenterVertically
                        ) {
                            Icon(
                                Icons.Default.Warning,
                                contentDescription = null,
                                tint = MaterialTheme.colorScheme.error
                            )
                            Text(
                                text = error.message,
                                style = MaterialTheme.typography.bodyMedium,
                                color = MaterialTheme.colorScheme.onErrorContainer
                            )
                        }
                    }
                } else {
                    Text(
                        text = "Enter the name of the person in these training images. This will help you find their photos later.",
                        style = MaterialTheme.typography.bodyMedium
                    )
                }
                OutlinedTextField(
                    value = personName,
                    onValueChange = { personName = it },
                    label = { Text("Person's Name") },
                    placeholder = { Text("e.g., John Doe") },
                    singleLine = true,
                    enabled = trainingState !is TrainingState.Processing,
                    keyboardOptions = KeyboardOptions(
                        capitalization = KeyboardCapitalization.Words,
                        imeAction = ImeAction.Done
                    ),
                    keyboardActions = KeyboardActions(
                        onDone = {
                            // IME "Done" confirms only when a name was typed.
                            if (personName.isNotBlank()) {
                                onConfirm(personName.trim())
                            }
                        }
                    ),
                    modifier = Modifier.fillMaxWidth()
                )
            }
        },
        confirmButton = {
            Button(
                onClick = { onConfirm(personName.trim()) },
                enabled = personName.isNotBlank() && trainingState !is TrainingState.Processing
            ) {
                // Inline spinner while training is running (button is disabled then).
                if (trainingState is TrainingState.Processing) {
                    CircularProgressIndicator(
                        modifier = Modifier.size(16.dp),
                        strokeWidth = 2.dp,
                        color = MaterialTheme.colorScheme.onPrimary
                    )
                    Spacer(modifier = Modifier.width(8.dp))
                }
                Text(if (isError) "Try Again" else "Start Training")
            }
        },
        dismissButton = {
            // No cancel while processing.
            if (trainingState !is TrainingState.Processing) {
                TextButton(onClick = onDismiss) {
                    Text("Cancel")
                }
            }
        }
    )
}
/**
 * Overlay shown during training process.
 *
 * Full-screen dim scrim with a centered card: spinner, stage text, and — once
 * [TrainingState.Processing.total] is known — a determinate progress bar.
 *
 * @param trainingState current processing state (stage label + progress/total)
 */
@Composable
private fun TrainingOverlay(trainingState: TrainingState.Processing) {
    Box(
        modifier = Modifier
            .fillMaxSize()
            .background(Color.Black.copy(alpha = 0.7f)),
        contentAlignment = Alignment.Center
    ) {
        Card(
            modifier = Modifier
                .padding(32.dp)
                .fillMaxWidth(0.9f),
            colors = CardDefaults.cardColors(
                containerColor = MaterialTheme.colorScheme.surface
            )
        ) {
            Column(
                modifier = Modifier.padding(24.dp),
                horizontalAlignment = Alignment.CenterHorizontally,
                verticalArrangement = Arrangement.spacedBy(16.dp)
            ) {
                CircularProgressIndicator(
                    modifier = Modifier.size(64.dp),
                    strokeWidth = 6.dp
                )
                Text(
                    text = "Creating Face Model",
                    style = MaterialTheme.typography.titleLarge,
                    fontWeight = FontWeight.Bold
                )
                Text(
                    text = trainingState.stage,
                    style = MaterialTheme.typography.bodyMedium,
                    textAlign = TextAlign.Center,
                    color = MaterialTheme.colorScheme.onSurfaceVariant
                )
                // Determinate bar only when total is known (> 0); avoids div-by-zero.
                if (trainingState.total > 0) {
                    LinearProgressIndicator(
                        progress = { (trainingState.progress.toFloat() / trainingState.total.toFloat()).coerceIn(0f, 1f) },
                        modifier = Modifier.fillMaxWidth()
                    )
                    Text(
                        text = "${trainingState.progress} / ${trainingState.total}",
                        style = MaterialTheme.typography.bodySmall,
                        color = MaterialTheme.colorScheme.onSurfaceVariant
                    )
                }
            }
        }
    }
}
/**
 * Centered "analysis in progress" view: spinner, explanation, and — once the
 * total image count is known — a determinate progress bar with a counter.
 */
@Composable
private fun ProcessingView(progress: Int, total: Int) {
    Column(
        modifier = Modifier.fillMaxSize(),
        verticalArrangement = Arrangement.Center,
        horizontalAlignment = Alignment.CenterHorizontally
    ) {
        CircularProgressIndicator(
            strokeWidth = 6.dp,
            modifier = Modifier.size(64.dp)
        )
        Spacer(Modifier.height(24.dp))
        Text(
            "Analyzing images...",
            style = MaterialTheme.typography.titleMedium
        )
        Spacer(Modifier.height(8.dp))
        Text(
            "Detecting faces and checking for duplicates",
            color = MaterialTheme.colorScheme.onSurfaceVariant,
            style = MaterialTheme.typography.bodyMedium
        )
        // Only render the determinate bar when a total is known.
        if (total > 0) {
            Spacer(Modifier.height(16.dp))
            val fraction = (progress.toFloat() / total.toFloat()).coerceIn(0f, 1f)
            LinearProgressIndicator(
                progress = { fraction },
                modifier = Modifier.width(200.dp)
            )
            Text(
                "$progress / $total",
                style = MaterialTheme.typography.bodySmall
            )
        }
    }
}
/**
 * Scrollable results list after scanning: header card, progress summary, one
 * card per image with Replace / Pick Face actions, validation issues, and a
 * final action button.
 *
 * @param result full sanity-check outcome to render
 * @param onContinue invoked when validation passed and the user proceeds
 * @param onRetry invoked to restart the flow (see NOTE on the button below)
 * @param onReplaceImage (oldUri, newUri) -> swap one training image
 * @param onSelectFaceFromMultiple open the face picker for a multi-face image
 */
@Composable
private fun ImprovedResultsView(
    result: TrainingSanityChecker.SanityCheckResult,
    onContinue: () -> Unit,
    onRetry: () -> Unit,
    onReplaceImage: (Uri, Uri) -> Unit,
    onSelectFaceFromMultiple: (FaceDetectionHelper.FaceDetectionResult) -> Unit
) {
    LazyColumn(
        modifier = Modifier.fillMaxSize(),
        contentPadding = PaddingValues(16.dp),
        verticalArrangement = Arrangement.spacedBy(16.dp)
    ) {
        // Welcome Header
        item {
            Card(
                modifier = Modifier.fillMaxWidth(),
                colors = CardDefaults.cardColors(
                    containerColor = MaterialTheme.colorScheme.secondaryContainer
                )
            ) {
                Column(
                    modifier = Modifier.padding(16.dp)
                ) {
                    Text(
                        text = "Analysis Complete!",
                        style = MaterialTheme.typography.headlineSmall,
                        fontWeight = FontWeight.Bold
                    )
                    Spacer(modifier = Modifier.height(4.dp))
                    Text(
                        text = "Review your images below. Tap 'Pick Face' on group photos to choose which person to train on, or 'Replace' to swap out any image.",
                        style = MaterialTheme.typography.bodyMedium,
                        color = MaterialTheme.colorScheme.onSecondaryContainer.copy(alpha = 0.8f)
                    )
                }
            }
        }
        // Progress Summary
        item {
            ProgressSummaryCard(
                totalImages = result.faceDetectionResults.size,
                validImages = result.validImagesWithFaces.size,
                requiredImages = 10,
                isValid = result.isValid
            )
        }
        // Image List Header
        item {
            Text(
                text = "Your Images (${result.faceDetectionResults.size})",
                style = MaterialTheme.typography.titleLarge,
                fontWeight = FontWeight.Bold
            )
        }
        // Image List with Actions
        itemsIndexed(result.faceDetectionResults) { index, imageResult ->
            ImageResultCard(
                index = index + 1,
                result = imageResult,
                onReplace = { newUri ->
                    onReplaceImage(imageResult.uri, newUri)
                },
                // Pick Face is only offered when more than one face was detected.
                onSelectFace = if (imageResult.faceCount > 1) {
                    { onSelectFaceFromMultiple(imageResult) }
                } else null
            )
        }
        // Validation Issues (if any)
        if (result.validationErrors.isNotEmpty()) {
            item {
                Spacer(modifier = Modifier.height(8.dp))
                ValidationIssuesCard(errors = result.validationErrors)
            }
        }
        // Action Button
        item {
            Spacer(modifier = Modifier.height(8.dp))
            // NOTE(review): with enabled = result.isValid, the else branch below
            // (onRetry) is unreachable — the button cannot be clicked while the
            // result is invalid. Confirm whether the invalid-state button should
            // remain tappable to restart selection, or the conditional removed.
            Button(
                onClick = if (result.isValid) onContinue else onRetry,
                modifier = Modifier.fillMaxWidth(),
                enabled = result.isValid,
                colors = ButtonDefaults.buttonColors(
                    containerColor = if (result.isValid)
                        MaterialTheme.colorScheme.primary
                    else
                        MaterialTheme.colorScheme.error.copy(alpha = 0.5f)
                )
            ) {
                Icon(
                    if (result.isValid) Icons.Default.CheckCircle else Icons.Default.Warning,
                    contentDescription = null
                )
                Spacer(modifier = Modifier.width(8.dp))
                Text(
                    if (result.isValid)
                        "Continue to Training (${result.validImagesWithFaces.size} images)"
                    else
                        "Fix ${result.validationErrors.size} Issue${if (result.validationErrors.size != 1) "s" else ""} to Continue"
                )
            }
            if (!result.isValid) {
                Spacer(modifier = Modifier.height(8.dp))
                Surface(
                    modifier = Modifier.fillMaxWidth(),
                    color = MaterialTheme.colorScheme.tertiaryContainer,
                    shape = RoundedCornerShape(8.dp)
                ) {
                    Row(
                        modifier = Modifier.padding(12.dp),
                        verticalAlignment = Alignment.CenterVertically
                    ) {
                        Icon(
                            Icons.Default.Info,
                            contentDescription = null,
                            tint = MaterialTheme.colorScheme.onTertiaryContainer,
                            modifier = Modifier.size(20.dp)
                        )
                        Spacer(modifier = Modifier.width(8.dp))
                        Text(
                            text = "Tip: Use 'Replace' to swap problematic images, or 'Pick Face' to choose from group photos",
                            style = MaterialTheme.typography.bodySmall,
                            color = MaterialTheme.colorScheme.onTertiaryContainer
                        )
                    }
                }
            }
        }
    }
}
/**
 * Summary card: title row with pass/fail icon, Total / Valid / Need stats,
 * and a progress bar of validImages against requiredImages.
 *
 * @param totalImages images the user selected
 * @param validImages images that passed validation (single usable face)
 * @param requiredImages minimum required for training (caller passes 10)
 * @param isValid whether the overall sanity check passed
 */
@Composable
private fun ProgressSummaryCard(
    totalImages: Int,
    validImages: Int,
    requiredImages: Int,
    isValid: Boolean
) {
    Card(
        modifier = Modifier.fillMaxWidth(),
        colors = CardDefaults.cardColors(
            containerColor = if (isValid)
                MaterialTheme.colorScheme.primaryContainer.copy(alpha = 0.5f)
            else
                MaterialTheme.colorScheme.errorContainer.copy(alpha = 0.3f)
        )
    ) {
        Column(
            modifier = Modifier.padding(16.dp)
        ) {
            Row(
                modifier = Modifier.fillMaxWidth(),
                horizontalArrangement = Arrangement.SpaceBetween,
                verticalAlignment = Alignment.CenterVertically
            ) {
                Text(
                    text = "Progress",
                    style = MaterialTheme.typography.titleMedium,
                    fontWeight = FontWeight.Bold
                )
                Icon(
                    imageVector = if (isValid) Icons.Default.CheckCircle else Icons.Default.Warning,
                    contentDescription = null,
                    tint = if (isValid)
                        MaterialTheme.colorScheme.primary
                    else
                        MaterialTheme.colorScheme.error,
                    modifier = Modifier.size(32.dp)
                )
            }
            Spacer(modifier = Modifier.height(12.dp))
            Row(
                modifier = Modifier.fillMaxWidth(),
                horizontalArrangement = Arrangement.SpaceEvenly
            ) {
                StatItem(
                    label = "Total",
                    value = totalImages.toString(),
                    color = MaterialTheme.colorScheme.onSurface
                )
                StatItem(
                    label = "Valid",
                    value = validImages.toString(),
                    color = if (validImages >= requiredImages)
                        MaterialTheme.colorScheme.primary
                    else
                        MaterialTheme.colorScheme.error
                )
                StatItem(
                    label = "Need",
                    value = requiredImages.toString(),
                    color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.6f)
                )
            }
            Spacer(modifier = Modifier.height(12.dp))
            LinearProgressIndicator(
                progress = { (validImages.toFloat() / requiredImages.toFloat()).coerceIn(0f, 1f) },
                modifier = Modifier.fillMaxWidth(),
                color = if (isValid) MaterialTheme.colorScheme.primary else MaterialTheme.colorScheme.error
            )
        }
    }
}
/** Vertically stacked value-over-label pair used in the progress summary row. */
@Composable
private fun StatItem(label: String, value: String, color: Color) {
    Column(horizontalAlignment = Alignment.CenterHorizontally) {
        Text(
            value,
            color = color,
            fontWeight = FontWeight.Bold,
            style = MaterialTheme.typography.headlineMedium
        )
        Text(
            label,
            color = color.copy(alpha = 0.7f),
            style = MaterialTheme.typography.bodySmall
        )
    }
}
/**
 * One row per training image: number badge, face thumbnail (cropped face when
 * available, original image otherwise), detection status, and actions
 * ("Pick Face" for multi-face images, "Replace" via the system photo picker).
 *
 * @param index 1-based display number for the badge
 * @param result face-detection outcome for this image
 * @param onReplace called with the newly picked URI
 * @param onSelectFace non-null only when the image has multiple faces
 */
@Composable
private fun ImageResultCard(
    index: Int,
    result: FaceDetectionHelper.FaceDetectionResult,
    onReplace: (Uri) -> Unit,
    onSelectFace: (() -> Unit)?
) {
    // System photo picker for the Replace action; null uri means user cancelled.
    val photoPickerLauncher = rememberLauncherForActivityResult(
        contract = ActivityResultContracts.PickVisualMedia()
    ) { uri ->
        uri?.let { onReplace(it) }
    }
    // Derive a single status for color/icon/text selection below.
    val status = when {
        result.errorMessage != null -> ImageStatus.ERROR
        !result.hasFace -> ImageStatus.NO_FACE
        result.faceCount > 1 -> ImageStatus.MULTIPLE_FACES
        result.faceCount == 1 -> ImageStatus.VALID
        else -> ImageStatus.ERROR
    }
    Card(
        modifier = Modifier.fillMaxWidth(),
        colors = CardDefaults.cardColors(
            containerColor = when (status) {
                ImageStatus.VALID -> MaterialTheme.colorScheme.primaryContainer.copy(alpha = 0.3f)
                ImageStatus.MULTIPLE_FACES -> MaterialTheme.colorScheme.tertiaryContainer.copy(alpha = 0.4f)
                else -> MaterialTheme.colorScheme.errorContainer.copy(alpha = 0.3f)
            }
        )
    ) {
        Row(
            modifier = Modifier
                .fillMaxWidth()
                .padding(12.dp),
            verticalAlignment = Alignment.CenterVertically,
            horizontalArrangement = Arrangement.spacedBy(12.dp)
        ) {
            // Image Number Badge
            Box(
                modifier = Modifier
                    .size(40.dp)
                    .background(
                        color = when (status) {
                            ImageStatus.VALID -> MaterialTheme.colorScheme.primary
                            ImageStatus.MULTIPLE_FACES -> MaterialTheme.colorScheme.tertiary
                            else -> MaterialTheme.colorScheme.error
                        },
                        shape = CircleShape
                    ),
                contentAlignment = Alignment.Center
            ) {
                Text(
                    text = index.toString(),
                    style = MaterialTheme.typography.titleMedium,
                    fontWeight = FontWeight.Bold,
                    color = Color.White
                )
            }
            // Thumbnail: prefer the cropped face bitmap when detection produced one.
            if (result.croppedFaceBitmap != null) {
                Image(
                    bitmap = result.croppedFaceBitmap.asImageBitmap(),
                    contentDescription = "Face",
                    modifier = Modifier
                        .size(64.dp)
                        .clip(RoundedCornerShape(8.dp))
                        .border(
                            BorderStroke(
                                2.dp,
                                when (status) {
                                    ImageStatus.VALID -> MaterialTheme.colorScheme.primary
                                    ImageStatus.MULTIPLE_FACES -> MaterialTheme.colorScheme.tertiary
                                    else -> MaterialTheme.colorScheme.error
                                }
                            ),
                            RoundedCornerShape(8.dp)
                        ),
                    contentScale = ContentScale.Crop
                )
            } else {
                AsyncImage(
                    model = result.uri,
                    contentDescription = "Original image",
                    modifier = Modifier
                        .size(64.dp)
                        .clip(RoundedCornerShape(8.dp)),
                    contentScale = ContentScale.Crop
                )
            }
            // Status and Info
            Column(
                modifier = Modifier.weight(1f)
            ) {
                Row(verticalAlignment = Alignment.CenterVertically) {
                    Icon(
                        imageVector = when (status) {
                            ImageStatus.VALID -> Icons.Default.CheckCircle
                            ImageStatus.MULTIPLE_FACES -> Icons.Default.Info
                            else -> Icons.Default.Warning
                        },
                        contentDescription = null,
                        tint = when (status) {
                            ImageStatus.VALID -> MaterialTheme.colorScheme.primary
                            ImageStatus.MULTIPLE_FACES -> MaterialTheme.colorScheme.tertiary
                            else -> MaterialTheme.colorScheme.error
                        },
                        modifier = Modifier.size(20.dp)
                    )
                    Spacer(modifier = Modifier.width(4.dp))
                    Text(
                        text = when (status) {
                            ImageStatus.VALID -> "Face Detected"
                            ImageStatus.MULTIPLE_FACES -> "Multiple Faces (${result.faceCount})"
                            ImageStatus.NO_FACE -> "No Face Detected"
                            ImageStatus.ERROR -> "Error"
                        },
                        style = MaterialTheme.typography.bodyMedium,
                        fontWeight = FontWeight.SemiBold
                    )
                }
                Text(
                    text = result.uri.lastPathSegment ?: "Unknown",
                    style = MaterialTheme.typography.bodySmall,
                    color = MaterialTheme.colorScheme.onSurfaceVariant,
                    maxLines = 1
                )
            }
            // Action Buttons
            Column(
                horizontalAlignment = Alignment.End,
                verticalArrangement = Arrangement.spacedBy(4.dp)
            ) {
                // Select Face button (for multiple faces)
                if (onSelectFace != null) {
                    OutlinedButton(
                        onClick = onSelectFace,
                        modifier = Modifier.height(32.dp),
                        contentPadding = PaddingValues(horizontal = 12.dp, vertical = 0.dp),
                        colors = ButtonDefaults.outlinedButtonColors(
                            contentColor = MaterialTheme.colorScheme.tertiary
                        ),
                        border = BorderStroke(1.dp, MaterialTheme.colorScheme.tertiary)
                    ) {
                        Icon(
                            Icons.Default.Face,
                            contentDescription = null,
                            modifier = Modifier.size(16.dp)
                        )
                        Spacer(modifier = Modifier.width(4.dp))
                        Text("Pick Face", style = MaterialTheme.typography.bodySmall)
                    }
                }
                // Replace button
                OutlinedButton(
                    onClick = {
                        photoPickerLauncher.launch(
                            PickVisualMediaRequest(ActivityResultContracts.PickVisualMedia.ImageOnly)
                        )
                    },
                    modifier = Modifier.height(32.dp),
                    contentPadding = PaddingValues(horizontal = 12.dp, vertical = 0.dp)
                ) {
                    Icon(
                        Icons.Default.Refresh,
                        contentDescription = null,
                        modifier = Modifier.size(16.dp)
                    )
                    Spacer(modifier = Modifier.width(4.dp))
                    Text("Replace", style = MaterialTheme.typography.bodySmall)
                }
            }
        }
    }
}
/**
 * Card listing every validation error with an actionable hint.
 *
 * Fix: the per-error lines previously mixed bulleted ("• ...") and unbulleted
 * text; all entries now share the "• " prefix for consistent formatting.
 *
 * @param errors validation errors produced by the sanity checker
 */
@Composable
private fun ValidationIssuesCard(errors: List<TrainingSanityChecker.ValidationError>) {
    Card(
        modifier = Modifier.fillMaxWidth(),
        colors = CardDefaults.cardColors(
            containerColor = MaterialTheme.colorScheme.errorContainer.copy(alpha = 0.3f)
        )
    ) {
        Column(
            modifier = Modifier.padding(16.dp),
            verticalArrangement = Arrangement.spacedBy(8.dp)
        ) {
            Row(verticalAlignment = Alignment.CenterVertically) {
                Icon(
                    Icons.Default.Warning,
                    contentDescription = null,
                    tint = MaterialTheme.colorScheme.error
                )
                Spacer(modifier = Modifier.width(8.dp))
                Text(
                    text = "Issues Found (${errors.size})",
                    style = MaterialTheme.typography.titleMedium,
                    fontWeight = FontWeight.Bold,
                    color = MaterialTheme.colorScheme.error
                )
            }
            HorizontalDivider(color = MaterialTheme.colorScheme.error.copy(alpha = 0.3f))
            errors.forEach { error ->
                // Each branch renders one bulleted, user-actionable line.
                when (error) {
                    is TrainingSanityChecker.ValidationError.NoFaceDetected -> {
                        Text(
                            text = "• ${error.uris.size} image(s) without detected faces - use Replace button",
                            style = MaterialTheme.typography.bodyMedium
                        )
                    }
                    is TrainingSanityChecker.ValidationError.MultipleFacesDetected -> {
                        Text(
                            text = "• ${error.uri.lastPathSegment} has ${error.faceCount} faces - use Pick Face button",
                            style = MaterialTheme.typography.bodyMedium
                        )
                    }
                    is TrainingSanityChecker.ValidationError.DuplicateImages -> {
                        Text(
                            text = "• ${error.groups.size} duplicate image group(s) - replace duplicates",
                            style = MaterialTheme.typography.bodyMedium
                        )
                    }
                    is TrainingSanityChecker.ValidationError.InsufficientImages -> {
                        Text(
                            text = "• Need ${error.required} valid images, currently have ${error.available}",
                            style = MaterialTheme.typography.bodyMedium,
                            fontWeight = FontWeight.Bold
                        )
                    }
                    is TrainingSanityChecker.ValidationError.ImageLoadError -> {
                        Text(
                            text = "• Failed to load ${error.uri.lastPathSegment} - use Replace button",
                            style = MaterialTheme.typography.bodyMedium
                        )
                    }
                }
            }
        }
    }
}
/** Full-screen error presentation with a retry affordance. */
@Composable
private fun ErrorView(
    message: String,
    onRetry: () -> Unit
) {
    Column(
        verticalArrangement = Arrangement.Center,
        horizontalAlignment = Alignment.CenterHorizontally,
        modifier = Modifier
            .fillMaxSize()
            .padding(16.dp)
    ) {
        Icon(
            Icons.Default.Close,
            contentDescription = null,
            tint = MaterialTheme.colorScheme.error,
            modifier = Modifier.size(64.dp)
        )
        Spacer(Modifier.height(16.dp))
        Text(
            "Error",
            fontWeight = FontWeight.Bold,
            style = MaterialTheme.typography.titleLarge
        )
        Spacer(Modifier.height(8.dp))
        Text(
            message,
            textAlign = TextAlign.Center,
            style = MaterialTheme.typography.bodyMedium
        )
        Spacer(Modifier.height(24.dp))
        Button(onClick = onRetry) {
            Icon(Icons.Default.Refresh, contentDescription = null)
            Spacer(Modifier.width(8.dp))
            Text("Try Again")
        }
    }
}
// Classification of a single training image; drives the card colors, icons
// and status text in ImageResultCard.
private enum class ImageStatus {
    VALID,          // exactly one face detected
    MULTIPLE_FACES, // more than one face; user must pick one
    NO_FACE,        // no face detected
    ERROR           // load/detection error
}

View File

@@ -0,0 +1,395 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.app.Application
import android.graphics.Bitmap
import android.net.Uri
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository
import com.placeholder.sherpai2.ml.FaceNetModel
import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.launch
import javax.inject.Inject
/**
 * State for image scanning and validation
 */
sealed class ScanningState {
    /** Nothing in flight; initial state. */
    object Idle : ScanningState()
    /** Scan in progress: [progress] of [total] images handled so far. */
    data class Processing(val progress: Int, val total: Int) : ScanningState()
    /** Scan finished; carries the full sanity-check outcome. */
    data class Success(
        val sanityCheckResult: TrainingSanityChecker.SanityCheckResult
    ) : ScanningState()
    /** Scan failed with a user-displayable [message]. */
    data class Error(val message: String) : ScanningState()
}
/**
 * State for face model training/creation
 */
sealed class TrainingState {
    /** No training in progress. */
    object Idle : TrainingState()
    /** Training running: human-readable [stage] plus [progress] of [total]. */
    data class Processing(val stage: String, val progress: Int, val total: Int) : TrainingState()
    /** Model created; carries the person's name and new database id. */
    data class Success(val personName: String, val personId: String) : TrainingState()
    /** Training failed with a user-displayable [message]. */
    data class Error(val message: String) : TrainingState()
}
/**
 * ViewModel for training face recognition models
 *
 * WORKFLOW:
 * 1. User selects 10+ images → scanAndTagFaces()
 * 2. Images validated → Success state with validImagesWithFaces
 * 3. User can replace images or pick faces from group photos
 * 4. When ready → createFaceModel(personName)
 * 5. Creates PersonEntity + FaceModelEntity in database
 *
 * Fix: renamed the lambda parameter in applyManualSelections that shadowed the
 * function parameter `result`.
 */
@HiltViewModel
class TrainViewModel @Inject constructor(
    application: Application,
    private val faceRecognitionRepository: FaceRecognitionRepository,
    private val faceNetModel: FaceNetModel
) : AndroidViewModel(application) {
    private val sanityChecker = TrainingSanityChecker(application)
    // NOTE(review): this helper is only ever cleaned up in onCleared(); detection
    // goes through sanityChecker (which creates its own helper). Confirm whether
    // this field is still needed.
    private val faceDetectionHelper = FaceDetectionHelper(application)
    // Scanning/validation state
    private val _uiState = MutableStateFlow<ScanningState>(ScanningState.Idle)
    val uiState: StateFlow<ScanningState> = _uiState.asStateFlow()
    // Training/model creation state
    private val _trainingState = MutableStateFlow<TrainingState>(TrainingState.Idle)
    val trainingState: StateFlow<TrainingState> = _trainingState.asStateFlow()
    // Keep track of current images for replacements
    private var currentImageUris: List<Uri> = emptyList()
    // Keep track of manual face selections (imageUri -> selectedFaceIndex)
    private val manualFaceSelections = mutableMapOf<Uri, ManualFaceSelection>()
    // A user's manual choice of face within a multi-face image.
    data class ManualFaceSelection(
        val faceIndex: Int,
        val croppedFaceBitmap: Bitmap
    )
    // ======================
    // FACE MODEL CREATION
    // ======================
    /**
     * Create face model from validated training images.
     *
     * COMPLETE PROCESS:
     * 1. Verify we have 10+ validated images
     * 2. Call repository to create PersonEntity + FaceModelEntity
     * 3. Repository handles: embedding generation, averaging, database save
     *
     * Call this when user clicks "Continue to Training" after validation passes.
     *
     * @param personName Name for the new person
     *
     * EXAMPLE USAGE IN UI:
     *   if (result.isValid) {
     *       showNameDialog { name ->
     *           trainViewModel.createFaceModel(name)
     *       }
     *   }
     */
    fun createFaceModel(personName: String) {
        val currentState = _uiState.value
        if (currentState !is ScanningState.Success) {
            _trainingState.value = TrainingState.Error("No validated images available")
            return
        }
        val validImages = currentState.sanityCheckResult.validImagesWithFaces
        if (validImages.size < 10) {
            _trainingState.value = TrainingState.Error("Need at least 10 valid images, have ${validImages.size}")
            return
        }
        viewModelScope.launch {
            try {
                _trainingState.value = TrainingState.Processing(
                    stage = "Creating person and training model",
                    progress = 0,
                    total = validImages.size
                )
                // Repository handles everything:
                // - Creates PersonEntity in 'persons' table
                // - Generates embeddings from face bitmaps
                // - Averages embeddings
                // - Creates FaceModelEntity linked to PersonEntity
                val personId = faceRecognitionRepository.createPersonWithFaceModel(
                    personName = personName,
                    validImages = validImages,
                    onProgress = { current, total ->
                        _trainingState.value = TrainingState.Processing(
                            stage = "Processing image $current/$total",
                            progress = current,
                            total = total
                        )
                    }
                )
                _trainingState.value = TrainingState.Success(
                    personName = personName,
                    personId = personId
                )
            } catch (e: Exception) {
                _trainingState.value = TrainingState.Error(
                    e.message ?: "Failed to create face model"
                )
            }
        }
    }
    /**
     * Reset training state back to idle.
     * Call this after handling success/error.
     */
    fun resetTrainingState() {
        _trainingState.value = TrainingState.Idle
    }
    // ======================
    // IMAGE VALIDATION
    // ======================
    /**
     * Scan and validate images for training.
     *
     * PROCESS:
     * 1. Face detection on all images
     * 2. Duplicate checking
     * 3. Validation against requirements (10+ images, one face per image)
     *
     * @param imageUris List of image URIs selected by user
     */
    fun scanAndTagFaces(imageUris: List<Uri>) {
        currentImageUris = imageUris
        manualFaceSelections.clear()
        performScan(imageUris)
    }
    /**
     * Replace a single image and re-scan all images.
     *
     * @param oldUri Image to replace
     * @param newUri New image
     */
    fun replaceImage(oldUri: Uri, newUri: Uri) {
        viewModelScope.launch {
            val updatedUris = currentImageUris.toMutableList()
            val index = updatedUris.indexOf(oldUri)
            if (index != -1) {
                updatedUris[index] = newUri
                currentImageUris = updatedUris
                // Remove manual selection for old URI if any
                manualFaceSelections.remove(oldUri)
                // Re-scan all images
                performScan(currentImageUris)
            }
        }
    }
    /**
     * User manually selected a face from a multi-face image.
     *
     * @param imageUri Image with multiple faces
     * @param faceIndex Which face the user selected (0-based)
     * @param croppedFaceBitmap Cropped face bitmap
     */
    fun selectFaceFromImage(imageUri: Uri, faceIndex: Int, croppedFaceBitmap: Bitmap) {
        manualFaceSelections[imageUri] = ManualFaceSelection(faceIndex, croppedFaceBitmap)
        // Re-process the results with the manual selection
        val currentState = _uiState.value
        if (currentState is ScanningState.Success) {
            val updatedResult = applyManualSelections(currentState.sanityCheckResult)
            _uiState.value = ScanningState.Success(updatedResult)
        }
    }
    /**
     * Perform the actual scanning.
     */
    private fun performScan(imageUris: List<Uri>) {
        viewModelScope.launch {
            try {
                _uiState.value = ScanningState.Processing(0, imageUris.size)
                // Perform sanity checks
                val result = sanityChecker.performSanityChecks(
                    imageUris = imageUris,
                    minImagesRequired = 10,
                    allowMultipleFaces = true, // Allow multiple faces - user can pick
                    duplicateSimilarityThreshold = 0.95
                )
                // Apply any manual face selections
                val finalResult = applyManualSelections(result)
                _uiState.value = ScanningState.Success(finalResult)
            } catch (e: Exception) {
                _uiState.value = ScanningState.Error(
                    e.message ?: "An unknown error occurred"
                )
            }
        }
    }
    /**
     * Apply manual face selections to the results.
     */
    private fun applyManualSelections(
        result: TrainingSanityChecker.SanityCheckResult
    ): TrainingSanityChecker.SanityCheckResult {
        // If no manual selections, return original
        if (manualFaceSelections.isEmpty()) {
            return result
        }
        // Update face detection results with manual selections
        val updatedFaceResults = result.faceDetectionResults.map { faceResult ->
            val manualSelection = manualFaceSelections[faceResult.uri]
            if (manualSelection != null) {
                // Replace the cropped face with the manually selected one
                faceResult.copy(
                    croppedFaceBitmap = manualSelection.croppedFaceBitmap,
                    // Treat as single face since user selected one
                    faceCount = 1
                )
            } else {
                faceResult
            }
        }
        // Update valid images list.
        // (Fix: lambda parameter renamed from `result`, which shadowed the
        // function parameter of the same name.)
        val updatedValidImages = updatedFaceResults
            .filter { it.hasFace }
            .filter { it.croppedFaceBitmap != null }
            .filter { it.errorMessage == null }
            .filter { it.faceCount >= 1 } // Now accept if user picked a face
            .map { detection ->
                TrainingSanityChecker.ValidTrainingImage(
                    uri = detection.uri,
                    croppedFaceBitmap = detection.croppedFaceBitmap!!,
                    faceCount = detection.faceCount
                )
            }
        // Recalculate validation errors
        val updatedErrors = result.validationErrors.toMutableList()
        // Remove multiple face errors for images with manual selections
        updatedErrors.removeAll { error ->
            error is TrainingSanityChecker.ValidationError.MultipleFacesDetected &&
                    manualFaceSelections.containsKey(error.uri)
        }
        // Check if we have enough valid images now
        if (updatedValidImages.size < 10) {
            if (updatedErrors.none { it is TrainingSanityChecker.ValidationError.InsufficientImages }) {
                updatedErrors.add(
                    TrainingSanityChecker.ValidationError.InsufficientImages(
                        required = 10,
                        available = updatedValidImages.size
                    )
                )
            }
        } else {
            // Remove insufficient images error if we now have enough
            updatedErrors.removeAll { it is TrainingSanityChecker.ValidationError.InsufficientImages }
        }
        val isValid = updatedErrors.isEmpty() && updatedValidImages.size >= 10
        return result.copy(
            isValid = isValid,
            faceDetectionResults = updatedFaceResults,
            validationErrors = updatedErrors,
            validImagesWithFaces = updatedValidImages
        )
    }
    /**
     * Get formatted error messages.
     */
    fun getFormattedErrors(result: TrainingSanityChecker.SanityCheckResult): List<String> {
        return sanityChecker.formatValidationErrors(result.validationErrors)
    }
    /**
     * Reset to idle state.
     */
    fun reset() {
        _uiState.value = ScanningState.Idle
        _trainingState.value = TrainingState.Idle
        currentImageUris = emptyList()
        manualFaceSelections.clear()
    }
    override fun onCleared() {
        super.onCleared()
        sanityChecker.cleanup()
        faceDetectionHelper.cleanup()
        // NOTE(review): faceNetModel is Hilt-injected; if it is scoped wider than
        // this ViewModel (e.g. a singleton), closing it here breaks other
        // consumers. Confirm its scope before releasing it in onCleared().
        faceNetModel.close()
    }
}
// ======================
// EXTENSION FUNCTIONS
// ======================
/**
 * Extension to copy FaceDetectionResult with modifications — presumably
 * provided because the class is not a data class (TODO confirm).
 */
private fun FaceDetectionHelper.FaceDetectionResult.copy(
    uri: Uri = this.uri,
    hasFace: Boolean = this.hasFace,
    faceCount: Int = this.faceCount,
    faceBounds: List<android.graphics.Rect> = this.faceBounds,
    croppedFaceBitmap: Bitmap? = this.croppedFaceBitmap,
    errorMessage: String? = this.errorMessage
): FaceDetectionHelper.FaceDetectionResult =
    FaceDetectionHelper.FaceDetectionResult(
        uri = uri,
        hasFace = hasFace,
        faceCount = faceCount,
        faceBounds = faceBounds,
        croppedFaceBitmap = croppedFaceBitmap,
        errorMessage = errorMessage
    )
/**
 * Extension to copy SanityCheckResult with modifications — presumably
 * provided because the class is not a data class (TODO confirm).
 */
private fun TrainingSanityChecker.SanityCheckResult.copy(
    isValid: Boolean = this.isValid,
    faceDetectionResults: List<FaceDetectionHelper.FaceDetectionResult> = this.faceDetectionResults,
    duplicateCheckResult: DuplicateImageDetector.DuplicateCheckResult = this.duplicateCheckResult,
    validationErrors: List<TrainingSanityChecker.ValidationError> = this.validationErrors,
    warnings: List<String> = this.warnings,
    validImagesWithFaces: List<TrainingSanityChecker.ValidTrainingImage> = this.validImagesWithFaces
): TrainingSanityChecker.SanityCheckResult =
    TrainingSanityChecker.SanityCheckResult(
        isValid = isValid,
        faceDetectionResults = faceDetectionResults,
        duplicateCheckResult = duplicateCheckResult,
        validationErrors = validationErrors,
        warnings = warnings,
        validImagesWithFaces = validImagesWithFaces
    )

View File

@@ -0,0 +1,31 @@
package com.placeholder.sherpai2.ui.trainingprep
import androidx.compose.foundation.layout.padding
import androidx.compose.material3.Button
import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.compose.material3.Scaffold
import androidx.compose.material3.Text
import androidx.compose.material3.TopAppBar
import androidx.compose.runtime.Composable
import androidx.compose.ui.Modifier
import androidx.hilt.lifecycle.viewmodel.compose.hiltViewModel
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun TrainingScreen(
    onSelectImages: () -> Unit
) {
    // Minimal entry screen: a titled top bar plus a single action that
    // launches the image-selection flow supplied by the caller.
    val topBar: @Composable () -> Unit = {
        TopAppBar(title = { Text("Training") })
    }
    Scaffold(topBar = topBar) { innerPadding ->
        Button(
            onClick = onSelectImages,
            modifier = Modifier.padding(innerPadding)
        ) {
            Text("Select Images")
        }
    }
}

View File

@@ -0,0 +1,188 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.content.Context
import android.graphics.Bitmap
import android.net.Uri
/**
 * Coordinates sanity checks for a set of candidate training images:
 * per-image face detection, near-duplicate detection across the set, and
 * minimum-count validation.
 *
 * Call [performSanityChecks] with the selected URIs, then [cleanup] when the
 * checker is no longer needed.
 */
class TrainingSanityChecker(private val context: Context) {

    private val faceDetectionHelper = FaceDetectionHelper(context)
    private val duplicateDetector = DuplicateImageDetector(context)

    /**
     * Aggregated outcome of [performSanityChecks].
     *
     * @param isValid true only when there are no validation errors AND the
     *   number of usable images meets the required minimum.
     * @param faceDetectionResults raw per-image face-detection results.
     * @param duplicateCheckResult result of the duplicate scan over all URIs.
     * @param validationErrors hard failures that block training.
     * @param warnings soft issues the user may ignore.
     * @param validImagesWithFaces the subset of images usable for training.
     */
    data class SanityCheckResult(
        val isValid: Boolean,
        val faceDetectionResults: List<FaceDetectionHelper.FaceDetectionResult>,
        val duplicateCheckResult: DuplicateImageDetector.DuplicateCheckResult,
        val validationErrors: List<ValidationError>,
        val warnings: List<String>,
        val validImagesWithFaces: List<ValidTrainingImage>
    )

    /** An input image that passed all checks, with its cropped face bitmap. */
    data class ValidTrainingImage(
        val uri: Uri,
        val croppedFaceBitmap: Bitmap,
        val faceCount: Int
    )

    /** Reasons a selection of images cannot be used for training as-is. */
    sealed class ValidationError {
        data class NoFaceDetected(val uris: List<Uri>) : ValidationError()
        data class MultipleFacesDetected(val uri: Uri, val faceCount: Int) : ValidationError()
        data class DuplicateImages(val groups: List<DuplicateImageDetector.DuplicateGroup>) : ValidationError()
        data class InsufficientImages(val required: Int, val available: Int) : ValidationError()
        data class ImageLoadError(val uri: Uri, val error: String) : ValidationError()
    }

    /**
     * Perform comprehensive sanity checks on training images.
     *
     * @param imageUris candidate images selected by the user.
     * @param minImagesRequired minimum number of usable images for training.
     * @param allowMultipleFaces when true, multi-face images produce warnings
     *   instead of errors and remain usable.
     * @param duplicateSimilarityThreshold similarity above which two images
     *   are treated as duplicates.
     */
    suspend fun performSanityChecks(
        imageUris: List<Uri>,
        minImagesRequired: Int = 10,
        allowMultipleFaces: Boolean = false,
        duplicateSimilarityThreshold: Double = 0.95
    ): SanityCheckResult {
        val validationErrors = mutableListOf<ValidationError>()
        val warnings = mutableListOf<String>()

        // Up-front count check on the raw selection; re-checked later against
        // the post-filter count (only one InsufficientImages error is kept).
        if (imageUris.size < minImagesRequired) {
            validationErrors.add(
                ValidationError.InsufficientImages(
                    required = minImagesRequired,
                    available = imageUris.size
                )
            )
        }

        // Step 1: detect faces in all images.
        val faceDetectionResults = faceDetectionHelper.detectFacesInImages(imageUris)

        // Images with no detectable face are reported as one grouped error.
        val imagesWithoutFaces = faceDetectionResults.filterNot { it.hasFace }
        if (imagesWithoutFaces.isNotEmpty()) {
            validationErrors.add(
                ValidationError.NoFaceDetected(
                    uris = imagesWithoutFaces.map { it.uri }
                )
            )
        }

        // Images that failed to load/decode. NOTE: such an image may also
        // appear in NoFaceDetected above; the two reports are independent.
        faceDetectionResults.filter { it.errorMessage != null }.forEach { result ->
            validationErrors.add(
                ValidationError.ImageLoadError(
                    uri = result.uri,
                    error = result.errorMessage ?: "Unknown error"
                )
            )
        }

        // Multi-face images: hard error per image, or a soft warning when the
        // caller opted in to multi-face inputs.
        val multiFaceResults = faceDetectionResults.filter { it.faceCount > 1 }
        if (!allowMultipleFaces) {
            multiFaceResults.forEach { result ->
                validationErrors.add(
                    ValidationError.MultipleFacesDetected(
                        uri = result.uri,
                        faceCount = result.faceCount
                    )
                )
            }
        } else {
            multiFaceResults.forEach { result ->
                warnings.add("Image ${result.uri.lastPathSegment} contains ${result.faceCount} faces. Using the largest detected face.")
            }
        }

        // Step 2: near-duplicate detection across the whole selection.
        val duplicateCheckResult = duplicateDetector.checkForDuplicates(
            uris = imageUris,
            similarityThreshold = duplicateSimilarityThreshold
        )
        if (duplicateCheckResult.hasDuplicates) {
            validationErrors.add(
                ValidationError.DuplicateImages(
                    groups = duplicateCheckResult.duplicateGroups
                )
            )
        }

        // Step 3: keep only images actually usable for training (a detected
        // face with a cropped bitmap; single-face unless multi-face allowed).
        val validImagesWithFaces = faceDetectionResults
            .filter { it.hasFace && it.croppedFaceBitmap != null }
            .filter { allowMultipleFaces || it.faceCount == 1 }
            .map { result ->
                ValidTrainingImage(
                    uri = result.uri,
                    croppedFaceBitmap = result.croppedFaceBitmap!!,
                    faceCount = result.faceCount
                )
            }

        // Re-check the count after filtering, but don't add a second
        // InsufficientImages error if the up-front check already fired.
        if (validImagesWithFaces.size < minImagesRequired &&
            validationErrors.none { it is ValidationError.InsufficientImages }
        ) {
            validationErrors.add(
                ValidationError.InsufficientImages(
                    required = minImagesRequired,
                    available = validImagesWithFaces.size
                )
            )
        }

        val isValid = validationErrors.isEmpty() && validImagesWithFaces.size >= minImagesRequired
        return SanityCheckResult(
            isValid = isValid,
            faceDetectionResults = faceDetectionResults,
            duplicateCheckResult = duplicateCheckResult,
            validationErrors = validationErrors,
            warnings = warnings,
            validImagesWithFaces = validImagesWithFaces
        )
    }

    /**
     * Format validation errors into human-readable messages, one per error.
     */
    fun formatValidationErrors(errors: List<ValidationError>): List<String> {
        return errors.map { error ->
            when (error) {
                is ValidationError.NoFaceDetected -> {
                    val count = error.uris.size
                    val images = error.uris.joinToString(", ") { it.lastPathSegment ?: "Unknown" }
                    "No face detected in $count image(s): $images"
                }
                is ValidationError.MultipleFacesDetected -> {
                    "Multiple faces (${error.faceCount}) detected in: ${error.uri.lastPathSegment}"
                }
                is ValidationError.DuplicateImages -> {
                    val count = error.groups.size
                    val details = error.groups.joinToString("\n") { group ->
                        "  - ${group.images.size} duplicates: ${group.images.joinToString(", ") { it.lastPathSegment ?: "Unknown" }}"
                    }
                    "Found $count duplicate group(s):\n$details"
                }
                is ValidationError.InsufficientImages -> {
                    "Insufficient images: need ${error.required}, but only ${error.available} valid images available"
                }
                is ValidationError.ImageLoadError -> {
                    "Failed to load image ${error.uri.lastPathSegment}: ${error.error}"
                }
            }
        }
    }

    /**
     * Release the face detector's resources.
     *
     * NOTE(review): duplicateDetector is not cleaned up here — confirm
     * whether DuplicateImageDetector holds resources that need releasing.
     */
    fun cleanup() {
        faceDetectionHelper.cleanup()
    }
}

View File

@@ -0,0 +1,78 @@
package com.placeholder.sherpai2.ui.trainingprep
import android.app.Application
import android.net.Uri
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.StateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.launch
/**
 * ViewModel that drives [TrainingSanityChecker] from a coroutine and exposes
 * progress/results to the UI as a [StateFlow].
 */
class TrainingSanityViewModel(application: Application) : AndroidViewModel(application) {

    private val sanityChecker = TrainingSanityChecker(application)

    private val _uiState = MutableStateFlow<TrainingSanityUiState>(TrainingSanityUiState.Idle)

    /** Observable UI state: Idle -> Checking -> Success / Error. */
    val uiState: StateFlow<TrainingSanityUiState> = _uiState.asStateFlow()

    sealed class TrainingSanityUiState {
        object Idle : TrainingSanityUiState()
        object Checking : TrainingSanityUiState()
        data class Success(
            val result: TrainingSanityChecker.SanityCheckResult
        ) : TrainingSanityUiState()
        data class Error(val message: String) : TrainingSanityUiState()
    }

    /**
     * Kick off sanity checks for [imageUris]; the outcome is published on
     * [uiState] as either Success (with the full result) or Error.
     */
    fun checkImages(
        imageUris: List<Uri>,
        minImagesRequired: Int = 10,
        allowMultipleFaces: Boolean = false,
        duplicateSimilarityThreshold: Double = 0.95
    ) {
        viewModelScope.launch {
            _uiState.value = TrainingSanityUiState.Checking
            val nextState = try {
                val checkResult = sanityChecker.performSanityChecks(
                    imageUris = imageUris,
                    minImagesRequired = minImagesRequired,
                    allowMultipleFaces = allowMultipleFaces,
                    duplicateSimilarityThreshold = duplicateSimilarityThreshold
                )
                TrainingSanityUiState.Success(checkResult)
            } catch (e: Exception) {
                TrainingSanityUiState.Error(
                    e.message ?: "An unknown error occurred during sanity checks"
                )
            }
            _uiState.value = nextState
        }
    }

    /** Return [uiState] to Idle, e.g. after the user dismisses results. */
    fun resetState() {
        _uiState.value = TrainingSanityUiState.Idle
    }

    /** Human-readable messages for the validation errors in [result]. */
    fun getFormattedErrors(result: TrainingSanityChecker.SanityCheckResult): List<String> =
        sanityChecker.formatValidationErrors(result.validationErrors)

    /** Release detector resources when the ViewModel is destroyed. */
    override fun onCleared() {
        super.onCleared()
        sanityChecker.cleanup()
    }
}

View File

@@ -3,4 +3,9 @@ plugins {
alias(libs.plugins.android.application) apply false alias(libs.plugins.android.application) apply false
alias(libs.plugins.kotlin.android) apply false alias(libs.plugins.kotlin.android) apply false
alias(libs.plugins.kotlin.compose) apply false alias(libs.plugins.kotlin.compose) apply false
//Adding these two fixes the conflicts between hilt / room / ksp and JavaPoet (canonicalName())
//https://github.com/google/dagger/issues/4048#issuecomment-1864237679
alias(libs.plugins.ksp) apply false
alias(libs.plugins.hilt.android) apply false
} }

View File

@@ -1,32 +1,77 @@
[versions] [versions]
agp = "8.13.1" # Tooling
agp = "8.13.2"
kotlin = "2.0.21" kotlin = "2.0.21"
coreKtx = "1.17.0" ksp = "2.0.21-1.0.28"
junit = "4.13.2"
junitVersion = "1.3.0" # AndroidX / Lifecycle
espressoCore = "3.7.0" coreKtx = "1.15.0"
lifecycleRuntimeKtx = "2.10.0" lifecycle = "2.8.7"
activityCompose = "1.12.1" activityCompose = "1.9.3"
composeBom = "2024.09.00" composeBom = "2025.12.01"
navigationCompose = "2.8.5"
hiltNavigationCompose = "1.3.0"
# DI & Database
hilt = "2.57.2"
room = "2.8.4"
# Images
coil = "2.7.0"
# Face Detect
mlkit-face-detection = "16.1.6"
coroutines-play-services = "1.8.1"
# Models
tensorflow-lite = "2.14.0"
tensorflow-lite-support = "0.4.4"
gson = "2.10.1"
[libraries] [libraries]
androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = "coreKtx" } androidx-core-ktx = { group = "androidx.core", name = "core-ktx", version.ref = "coreKtx" }
junit = { group = "junit", name = "junit", version.ref = "junit" } androidx-lifecycle-runtime-ktx = { group = "androidx.lifecycle", name = "lifecycle-runtime-ktx", version.ref = "lifecycle" }
androidx-junit = { group = "androidx.test.ext", name = "junit", version.ref = "junitVersion" } androidx-lifecycle-viewmodel-compose = { group = "androidx.lifecycle", name = "lifecycle-viewmodel-compose", version.ref = "lifecycle" }
androidx-espresso-core = { group = "androidx.test.espresso", name = "espresso-core", version.ref = "espressoCore" }
androidx-lifecycle-runtime-ktx = { group = "androidx.lifecycle", name = "lifecycle-runtime-ktx", version.ref = "lifecycleRuntimeKtx" }
androidx-activity-compose = { group = "androidx.activity", name = "activity-compose", version.ref = "activityCompose" } androidx-activity-compose = { group = "androidx.activity", name = "activity-compose", version.ref = "activityCompose" }
# Compose
androidx-compose-bom = { group = "androidx.compose", name = "compose-bom", version.ref = "composeBom" } androidx-compose-bom = { group = "androidx.compose", name = "compose-bom", version.ref = "composeBom" }
androidx-compose-ui = { group = "androidx.compose.ui", name = "ui" } androidx-compose-ui = { group = "androidx.compose.ui", name = "ui" }
androidx-compose-ui-graphics = { group = "androidx.compose.ui", name = "ui-graphics" } androidx-compose-ui-graphics = { group = "androidx.compose.ui", name = "ui-graphics" }
androidx-compose-ui-tooling = { group = "androidx.compose.ui", name = "ui-tooling" } androidx-compose-ui-tooling = { group = "androidx.compose.ui", name = "ui-tooling" }
androidx-compose-ui-tooling-preview = { group = "androidx.compose.ui", name = "ui-tooling-preview" } androidx-compose-ui-tooling-preview = { group = "androidx.compose.ui", name = "ui-tooling-preview" }
androidx-compose-ui-test-manifest = { group = "androidx.compose.ui", name = "ui-test-manifest" }
androidx-compose-ui-test-junit4 = { group = "androidx.compose.ui", name = "ui-test-junit4" }
androidx-compose-material3 = { group = "androidx.compose.material3", name = "material3" } androidx-compose-material3 = { group = "androidx.compose.material3", name = "material3" }
androidx-compose-material-icons = { group = "androidx.compose.material", name = "material-icons-extended" }
# Navigation & Hilt
androidx-navigation-compose = { group = "androidx.navigation", name = "navigation-compose", version.ref = "navigationCompose" }
androidx-hilt-navigation-compose = { group = "androidx.hilt", name = "hilt-navigation-compose", version.ref = "hiltNavigationCompose" }
hilt-android = { group = "com.google.dagger", name = "hilt-android", version.ref = "hilt" }
hilt-compiler = { group = "com.google.dagger", name = "hilt-compiler", version.ref = "hilt" }
# Room
room-runtime = { group = "androidx.room", name = "room-runtime", version.ref = "room" }
room-ktx = { group = "androidx.room", name = "room-ktx", version.ref = "room" }
room-compiler = { group = "androidx.room", name = "room-compiler", version.ref = "room" }
# Misc
coil-compose = { group = "io.coil-kt", name = "coil-compose", version.ref = "coil" }
#Face Detect
mlkit-face-detection = { group = "com.google.mlkit", name = "face-detection", version.ref = "mlkit-face-detection"}
kotlinx-coroutines-play-services = {group = "org.jetbrains.kotlinx",name = "kotlinx-coroutines-play-services",version.ref = "coroutines-play-services"}
# TensorFlow Lite for FaceNet
tensorflow-lite = { group = "org.tensorflow", name = "tensorflow-lite", version.ref = "tensorflow-lite" }
tensorflow-lite-support = { group = "org.tensorflow", name = "tensorflow-lite-support", version.ref = "tensorflow-lite-support" }
tensorflow-lite-gpu = { group = "org.tensorflow", name = "tensorflow-lite-gpu", version.ref = "tensorflow-lite" }
gson = { group = "com.google.code.gson", name = "gson", version.ref = "gson" }
[plugins] [plugins]
android-application = { id = "com.android.application", version.ref = "agp" } android-application = { id = "com.android.application", version.ref = "agp" }
kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" } kotlin-android = { id = "org.jetbrains.kotlin.android", version.ref = "kotlin" }
kotlin-compose = { id = "org.jetbrains.kotlin.plugin.compose", version.ref = "kotlin" } kotlin-compose = { id = "org.jetbrains.kotlin.plugin.compose", version.ref = "kotlin" }
ksp = { id = "com.google.devtools.ksp", version.ref = "ksp" }
hilt-android = { id = "com.google.dagger.hilt.android", version.ref = "hilt" }

View File

@@ -1,6 +1,6 @@
#Tue Dec 09 23:28:28 EST 2025 #Tue Dec 09 23:28:28 EST 2025
distributionBase=GRADLE_USER_HOME distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.13-bin.zip distributionUrl=https\://services.gradle.org/distributions/gradle-8.14-bin.zip
zipStoreBase=GRADLE_USER_HOME zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists zipStorePath=wrapper/dists