Improved Training Screen and underlying data layer

Added diagnostic view model with a flag for picture detection, but broke everything while messing with TagDao. À demain.
This commit is contained in:
genki
2026-01-08 00:02:27 -05:00
parent 6ce115baa9
commit 51fdfbf3d6
15 changed files with 1411 additions and 380 deletions

View File

@@ -4,6 +4,14 @@
<selectionStates> <selectionStates>
<SelectionState runConfigName="app"> <SelectionState runConfigName="app">
<option name="selectionMode" value="DROPDOWN" /> <option name="selectionMode" value="DROPDOWN" />
<DropdownSelection timestamp="2026-01-08T02:44:48.809354959Z">
<Target type="DEFAULT_BOOT">
<handle>
<DeviceId pluginId="LocalEmulator" identifier="path=/home/genki/.android/avd/Medium_Phone.avd" />
</handle>
</Target>
</DropdownSelection>
<DialogSelection />
</SelectionState> </SelectionState>
</selectionStates> </selectionStates>
</component> </component>

View File

@@ -21,7 +21,6 @@ import com.placeholder.sherpai2.data.local.entity.*
TagEntity::class, TagEntity::class,
EventEntity::class, EventEntity::class,
ImageTagEntity::class, ImageTagEntity::class,
ImagePersonEntity::class,
ImageEventEntity::class, ImageEventEntity::class,
// ===== NEW ENTITIES ===== // ===== NEW ENTITIES =====
@@ -29,7 +28,7 @@ import com.placeholder.sherpai2.data.local.entity.*
FaceModelEntity::class, // NEW: Face embeddings FaceModelEntity::class, // NEW: Face embeddings
PhotoFaceTagEntity::class // NEW: Face tags PhotoFaceTagEntity::class // NEW: Face tags
], ],
version = 3, version = 4,
exportSchema = false exportSchema = false
) )
// No TypeConverters needed - embeddings stored as strings // No TypeConverters needed - embeddings stored as strings
@@ -40,7 +39,6 @@ abstract class AppDatabase : RoomDatabase() {
abstract fun tagDao(): TagDao abstract fun tagDao(): TagDao
abstract fun eventDao(): EventDao abstract fun eventDao(): EventDao
abstract fun imageTagDao(): ImageTagDao abstract fun imageTagDao(): ImageTagDao
abstract fun imagePersonDao(): ImagePersonDao
abstract fun imageEventDao(): ImageEventDao abstract fun imageEventDao(): ImageEventDao
abstract fun imageAggregateDao(): ImageAggregateDao abstract fun imageAggregateDao(): ImageAggregateDao

View File

@@ -1,25 +0,0 @@
package com.placeholder.sherpai2.data.local.dao
import androidx.room.Dao
import androidx.room.Insert
import androidx.room.OnConflictStrategy
import androidx.room.Query
import com.placeholder.sherpai2.data.local.entity.ImagePersonEntity
/**
 * Room DAO for the image_persons junction table, linking images to the
 * people associated with them.
 */
@Dao
interface ImagePersonDao {
// REPLACE on conflict: re-inserting a row with the same primary key
// overwrites the existing one, so this behaves as an upsert.
@Insert(onConflict = OnConflictStrategy.REPLACE)
suspend fun upsert(entity: ImagePersonEntity)
/**
 * All images containing a specific person.
 *
 * Restricted to rows with visibility = 'PUBLIC' and confirmed = 1,
 * i.e. user-confirmed, non-private matches only.
 *
 * @return imageIds of matching images
 */
@Query("""
SELECT imageId FROM image_persons
WHERE personId = :personId
AND visibility = 'PUBLIC'
AND confirmed = 1
""")
suspend fun findImagesForPerson(personId: String): List<String>
}

View File

@@ -50,4 +50,6 @@ interface ImageTagDao {
""") """)
fun getTagsForImage(imageId: String): Flow<List<TagEntity>> fun getTagsForImage(imageId: String): Flow<List<TagEntity>>
} }

View File

@@ -4,21 +4,206 @@ import androidx.room.Dao
import androidx.room.Insert import androidx.room.Insert
import androidx.room.OnConflictStrategy import androidx.room.OnConflictStrategy
import androidx.room.Query import androidx.room.Query
import com.placeholder.sherpai2.data.local.entity.ImageEntity
import com.placeholder.sherpai2.data.local.entity.TagEntity import com.placeholder.sherpai2.data.local.entity.TagEntity
import com.placeholder.sherpai2.data.local.entity.TagWithUsage
import kotlinx.coroutines.flow.Flow
/**
* TagDao - Tag management with face recognition integration
*/
@Dao @Dao
interface TagDao { interface TagDao {
@Insert(onConflict = OnConflictStrategy.IGNORE) // ======================
suspend fun insert(tag: TagEntity) // BASIC OPERATIONS
// ======================
@Insert(onConflict = OnConflictStrategy.IGNORE)
suspend fun insert(tag: TagEntity): Long
/**
* Resolve a tag by value.
* Example: "park"
*/
@Query("SELECT * FROM tags WHERE value = :value LIMIT 1") @Query("SELECT * FROM tags WHERE value = :value LIMIT 1")
suspend fun getByValue(value: String): TagEntity? suspend fun getByValue(value: String): TagEntity?
@Query("SELECT * FROM tags") @Query("SELECT * FROM tags WHERE tagId = :tagId")
suspend fun getById(tagId: String): TagEntity?
@Query("SELECT * FROM tags ORDER BY value ASC")
suspend fun getAll(): List<TagEntity> suspend fun getAll(): List<TagEntity>
@Query("SELECT * FROM tags ORDER BY value ASC")
fun getAllFlow(): Flow<List<TagEntity>>
@Query("SELECT * FROM tags WHERE type = :type ORDER BY value ASC")
suspend fun getByType(type: String): List<TagEntity>
@Query("DELETE FROM tags WHERE tagId = :tagId")
suspend fun delete(tagId: String)
// ======================
// STATISTICS (returns TagWithUsage)
// ======================
/**
* Get most used tags WITH usage counts
*/
@Query("""
SELECT t.tagId, t.type, t.value, t.createdAt,
COUNT(it.imageId) as usage_count
FROM tags t
LEFT JOIN image_tags it ON t.tagId = it.tagId
GROUP BY t.tagId
ORDER BY usage_count DESC
LIMIT :limit
""")
suspend fun getMostUsedTags(limit: Int = 10): List<TagWithUsage>
/**
* Get tag usage count
*/
@Query("""
SELECT COUNT(DISTINCT it.imageId)
FROM image_tags it
WHERE it.tagId = :tagId
""")
suspend fun getTagUsageCount(tagId: String): Int
// ======================
// PERSON INTEGRATION
// ======================
/**
* Get all tags used for images containing a specific person
*/
@Query("""
SELECT DISTINCT t.* FROM tags t
INNER JOIN image_tags it ON t.tagId = it.tagId
INNER JOIN photo_face_tags pft ON it.imageId = pft.imageId
INNER JOIN face_models fm ON pft.faceModelId = fm.id
WHERE fm.personId = :personId
ORDER BY t.value ASC
""")
suspend fun getTagsForPerson(personId: String): List<TagEntity>
/**
* Get images that have both a specific tag AND contain a specific person
*/
@Query("""
SELECT DISTINCT i.* FROM images i
INNER JOIN image_tags it ON i.imageId = it.imageId
INNER JOIN photo_face_tags pft ON i.imageId = pft.imageId
INNER JOIN face_models fm ON pft.faceModelId = fm.id
WHERE it.tagId = :tagId AND fm.personId = :personId
ORDER BY i.capturedAt DESC
""")
suspend fun getImagesWithTagAndPerson(
tagId: String,
personId: String
): List<ImageEntity>
/**
* Get images with tag and person as Flow
*/
@Query("""
SELECT DISTINCT i.* FROM images i
INNER JOIN image_tags it ON i.imageId = it.imageId
INNER JOIN photo_face_tags pft ON i.imageId = pft.imageId
INNER JOIN face_models fm ON pft.faceModelId = fm.id
WHERE it.tagId = :tagId AND fm.personId = :personId
ORDER BY i.capturedAt DESC
""")
fun getImagesWithTagAndPersonFlow(
tagId: String,
personId: String
): Flow<List<ImageEntity>>
/**
* Count images with tag and person
*/
@Query("""
SELECT COUNT(DISTINCT i.imageId) FROM images i
INNER JOIN image_tags it ON i.imageId = it.imageId
INNER JOIN photo_face_tags pft ON i.imageId = pft.imageId
INNER JOIN face_models fm ON pft.faceModelId = fm.id
WHERE it.tagId = :tagId AND fm.personId = :personId
""")
suspend fun countImagesWithTagAndPerson(
tagId: String,
personId: String
): Int
// ======================
// AUTO-SUGGESTIONS
// ======================
/**
* Suggest tags based on person's relationship
*/
@Query("""
SELECT DISTINCT t.* FROM tags t
INNER JOIN image_tags it ON t.tagId = it.tagId
INNER JOIN photo_face_tags pft ON it.imageId = pft.imageId
INNER JOIN face_models fm ON pft.faceModelId = fm.id
INNER JOIN persons p ON fm.personId = p.id
WHERE p.relationship = :relationship
AND p.id != :excludePersonId
GROUP BY t.tagId
ORDER BY COUNT(it.imageId) DESC
LIMIT :limit
""")
suspend fun suggestTagsBasedOnRelationship(
relationship: String,
excludePersonId: String,
limit: Int = 5
): List<TagEntity>
/**
* Get tags commonly used with this tag
*/
@Query("""
SELECT DISTINCT t2.* FROM tags t2
INNER JOIN image_tags it2 ON t2.tagId = it2.tagId
WHERE it2.imageId IN (
SELECT it1.imageId FROM image_tags it1
WHERE it1.tagId = :tagId
)
AND t2.tagId != :tagId
GROUP BY t2.tagId
ORDER BY COUNT(it2.imageId) DESC
LIMIT :limit
""")
suspend fun getRelatedTags(
tagId: String,
limit: Int = 5
): List<TagEntity>
// ======================
// SEARCH
// ======================
/**
* Search tags by value (partial match)
*/
@Query("""
SELECT * FROM tags
WHERE value LIKE '%' || :query || '%'
ORDER BY value ASC
LIMIT :limit
""")
suspend fun searchTags(query: String, limit: Int = 20): List<TagEntity>
/**
* Search tags with usage count
*/
@Query("""
SELECT t.tagId, t.type, t.value, t.createdAt,
COUNT(it.imageId) as usage_count
FROM tags t
LEFT JOIN image_tags it ON t.tagId = it.tagId
WHERE t.value LIKE '%' || :query || '%'
GROUP BY t.tagId
ORDER BY usage_count DESC, t.value ASC
LIMIT :limit
""")
suspend fun searchTagsWithUsage(query: String, limit: Int = 20): List<TagWithUsage>
} }

View File

@@ -18,15 +18,92 @@ import java.util.UUID
Index(value = ["name"]) Index(value = ["name"])
] ]
) )
/**
* PersonEntity - Represents a person in your app
*
* CLEAN DESIGN:
* - Uses String UUID for id (matches your ImageEntity.imageId pattern)
* - Face embeddings stored separately in FaceModelEntity
* - Simple, extensible schema
* - Now includes DOB and relationship for better organization
*/
data class PersonEntity( data class PersonEntity(
@PrimaryKey @PrimaryKey
val id: String = UUID.randomUUID().toString(), val id: String = UUID.randomUUID().toString(),
/**
* Person's name (required)
*/
val name: String, val name: String,
val createdAt: Long = System.currentTimeMillis(),
val updatedAt: Long = System.currentTimeMillis()
)
/**
* Date of birth (optional)
* Stored as Unix timestamp (milliseconds)
*/
val dateOfBirth: Long? = null,
/**
* Relationship to user (optional)
* Examples: "Family", "Friend", "Partner", "Child", "Parent", "Sibling", "Colleague", "Other"
*/
val relationship: String? = null,
/**
* When this person was added
*/
val createdAt: Long = System.currentTimeMillis(),
/**
* Last time this person's data was updated
*/
val updatedAt: Long = System.currentTimeMillis()
) {
companion object {
/**
* Create PersonEntity with optional fields
*/
fun create(
name: String,
dateOfBirth: Long? = null,
relationship: String? = null
): PersonEntity {
return PersonEntity(
name = name,
dateOfBirth = dateOfBirth,
relationship = relationship
)
}
}
/**
* Calculate age if date of birth is available
*/
fun getAge(): Int? {
if (dateOfBirth == null) return null
val now = System.currentTimeMillis()
val ageInMillis = now - dateOfBirth
val ageInYears = ageInMillis / (1000L * 60 * 60 * 24 * 365)
return ageInYears.toInt()
}
/**
* Get relationship emoji
*/
fun getRelationshipEmoji(): String {
return when (relationship) {
"Family" -> "👨‍👩‍👧‍👦"
"Friend" -> "🤝"
"Partner" -> "❤️"
"Child" -> "👶"
"Parent" -> "👪"
"Sibling" -> "👫"
"Colleague" -> "💼"
else -> "👤"
}
}
}
/** /**
* FaceModelEntity - Stores face recognition model (embedding) for a person * FaceModelEntity - Stores face recognition model (embedding) for a person
* *

View File

@@ -1,40 +0,0 @@
package com.placeholder.sherpai2.data.local.entity
import androidx.room.Entity
import androidx.room.ForeignKey
import androidx.room.Index
/**
 * Junction entity linking an image to a person appearing in it.
 *
 * Composite primary key (imageId, personId) allows at most one link per
 * image/person pair. Rows are removed automatically (CASCADE) when either
 * the referenced image or person is deleted.
 */
@Entity(
tableName = "image_persons",
primaryKeys = ["imageId", "personId"],
foreignKeys = [
ForeignKey(
entity = ImageEntity::class,
parentColumns = ["imageId"],
childColumns = ["imageId"],
onDelete = ForeignKey.CASCADE
),
ForeignKey(
entity = PersonEntity::class,
parentColumns = ["id"],
childColumns = ["personId"],
onDelete = ForeignKey.CASCADE
)
],
// Secondary index supports person -> images lookups.
indices = [
Index("personId")
]
)
data class ImagePersonEntity(
// Id of the image containing the person (ImageEntity.imageId).
val imageId: String,
// Id of the person (PersonEntity.id).
val personId: String,
// Confidence score for this image/person association
// (presumably from face recognition — confirm at call sites).
val confidence: Float,
// True once the user has confirmed this match.
val confirmed: Boolean,
/**
 * PUBLIC | PRIVATE
 */
val visibility: String
)

View File

@@ -1,30 +1,119 @@
package com.placeholder.sherpai2.data.local.entity package com.placeholder.sherpai2.data.local.entity
import androidx.room.ColumnInfo
import androidx.room.Entity import androidx.room.Entity
import androidx.room.PrimaryKey import androidx.room.PrimaryKey
import java.util.UUID
/** /**
* Represents a conceptual tag. * TagEntity - Normalized tag storage
* *
* Tags are normalized so that: * DESIGN:
* - "park" exists once * - Tags exist once (e.g., "vacation")
* - many images can reference it * - Multiple images reference via ImageTagEntity junction table
* - Type system: GENERIC | SYSTEM | HIDDEN
*/ */
@Entity(tableName = "tags") @Entity(tableName = "tags")
data class TagEntity( data class TagEntity(
@PrimaryKey @PrimaryKey
val tagId: String, val tagId: String = UUID.randomUUID().toString(),
/** /**
* GENERIC | SYSTEM | HIDDEN * Tag type: GENERIC | SYSTEM | HIDDEN
*/ */
val type: String, val type: String = TagType.GENERIC,
/** /**
* Human-readable value, e.g. "park", "sunset" * Human-readable value, e.g. "vacation", "beach"
*/ */
val value: String, val value: String,
val createdAt: Long /**
* When tag was created
*/
val createdAt: Long = System.currentTimeMillis()
) {
companion object {
fun createUserTag(value: String): TagEntity {
return TagEntity(
type = TagType.GENERIC,
value = value.trim().lowercase()
) )
}
fun createSystemTag(value: String): TagEntity {
return TagEntity(
type = TagType.SYSTEM,
value = value.trim().lowercase()
)
}
fun createHiddenTag(value: String): TagEntity {
return TagEntity(
type = TagType.HIDDEN,
value = value.trim().lowercase()
)
}
}
fun isUserTag(): Boolean = type == TagType.GENERIC
fun isSystemTag(): Boolean = type == TagType.SYSTEM
fun isHiddenTag(): Boolean = type == TagType.HIDDEN
fun getDisplayValue(): String = value.replaceFirstChar { it.uppercase() }
}
/**
* TagWithUsage - For queries that include usage count
*
* Use this for statistics queries
*/
data class TagWithUsage(
@ColumnInfo(name = "tagId")
val tagId: String,
@ColumnInfo(name = "type")
val type: String,
@ColumnInfo(name = "value")
val value: String,
@ColumnInfo(name = "createdAt")
val createdAt: Long,
@ColumnInfo(name = "usage_count")
val usageCount: Int
) {
/**
* Convert to TagEntity (without usage count)
*/
fun toTagEntity(): TagEntity {
return TagEntity(
tagId = tagId,
type = type,
value = value,
createdAt = createdAt
)
}
}
/**
* Tag type constants
*/
object TagType {
const val GENERIC = "GENERIC"
const val SYSTEM = "SYSTEM"
const val HIDDEN = "HIDDEN"
}
/**
* Common system tag values
*/
object SystemTags {
const val HAS_FACES = "has_faces"
const val MULTIPLE_PEOPLE = "multiple_people"
const val LANDSCAPE = "landscape"
const val PORTRAIT = "portrait"
const val LOW_QUALITY = "low_quality"
const val BLURRY = "blurry"
}

View File

@@ -15,12 +15,6 @@ data class ImageWithEverything(
) )
val tags: List<ImageTagEntity>, val tags: List<ImageTagEntity>,
@Relation(
parentColumn = "imageId",
entityColumn = "imageId"
)
val persons: List<ImagePersonEntity>,
@Relation( @Relation(
parentColumn = "imageId", parentColumn = "imageId",
entityColumn = "imageId" entityColumn = "imageId"

View File

@@ -332,6 +332,42 @@ class FaceRecognitionRepository @Inject constructor(
faceModelDao.deleteFaceModelById(faceModelId) faceModelDao.deleteFaceModelById(faceModelId)
} }
/**
 * Create a new person and train their face model in one operation.
 *
 * Inserts [person] first (including any optional fields such as DOB and
 * relationship) so the face model rows can reference its id, then trains
 * the model from [validImages].
 *
 * Runs on Dispatchers.IO; main-safe for callers.
 *
 * @param person fully-populated PersonEntity to insert
 * @param validImages pre-validated training images
 * @param onProgress invoked as (current, total) while training
 * @return the new person's id (String UUID)
 */
suspend fun createPersonWithFaceModel(
person: PersonEntity,
validImages: List<TrainingSanityChecker.ValidTrainingImage>,
onProgress: (Int, Int) -> Unit = { _, _ -> }
): String = withContext(Dispatchers.IO) {
// Person row must exist before training so the model can reference it.
personDao.insert(person)
trainPerson(
personId = person.id,
validImages = validImages,
onProgress = onProgress
)
person.id
}
/**
 * Look up a stored face model by its id.
 *
 * @return the FaceModelEntity, or null if no model with that id exists
 */
suspend fun getFaceModelById(faceModelId: String): FaceModelEntity? = withContext(Dispatchers.IO) {
faceModelDao.getFaceModelById(faceModelId)
}
suspend fun deleteTagsForImage(imageId: String) { suspend fun deleteTagsForImage(imageId: String) {
photoFaceTagDao.deleteTagsForImage(imageId) photoFaceTagDao.deleteTagsForImage(imageId)
} }
@@ -339,6 +375,8 @@ class FaceRecognitionRepository @Inject constructor(
fun cleanup() { fun cleanup() {
faceNetModel.close() faceNetModel.close()
} }
} }
data class DetectedFace( data class DetectedFace(
@@ -355,3 +393,4 @@ data class PersonFaceStats(
val averageConfidence: Float, val averageConfidence: Float,
val lastDetectedAt: Long? val lastDetectedAt: Long?
) )

View File

@@ -14,6 +14,8 @@ import com.placeholder.sherpai2.data.repository.DetectedFace
import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository
import com.placeholder.sherpai2.data.repository.PersonFaceStats import com.placeholder.sherpai2.data.repository.PersonFaceStats
import com.placeholder.sherpai2.domain.repository.ImageRepository import com.placeholder.sherpai2.domain.repository.ImageRepository
import com.placeholder.sherpai2.util.DebugFlags
import com.placeholder.sherpai2.util.DiagnosticLogger
import dagger.hilt.android.lifecycle.HiltViewModel import dagger.hilt.android.lifecycle.HiltViewModel
import kotlinx.coroutines.Dispatchers import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay import kotlinx.coroutines.delay
@@ -27,13 +29,11 @@ import kotlinx.coroutines.tasks.await
import javax.inject.Inject import javax.inject.Inject
/** /**
* PersonInventoryViewModel - Manage trained face models * PersonInventoryViewModel - Single version with feature flags
* *
* Features: * Toggle diagnostics in DebugFlags.kt:
* - List all trained persons with stats * - ENABLE_FACE_RECOGNITION_LOGGING = true/false
* - Delete models * - USE_LIBERAL_THRESHOLDS = true/false
* - SCAN LIBRARY to find person in all photos
* - View sample photos
*/ */
@HiltViewModel @HiltViewModel
class PersonInventoryViewModel @Inject constructor( class PersonInventoryViewModel @Inject constructor(
@@ -48,13 +48,12 @@ class PersonInventoryViewModel @Inject constructor(
private val _scanningState = MutableStateFlow<ScanningState>(ScanningState.Idle) private val _scanningState = MutableStateFlow<ScanningState>(ScanningState.Idle)
val scanningState: StateFlow<ScanningState> = _scanningState.asStateFlow() val scanningState: StateFlow<ScanningState> = _scanningState.asStateFlow()
// ML Kit face detector
private val faceDetector by lazy { private val faceDetector by lazy {
val options = FaceDetectorOptions.Builder() val options = FaceDetectorOptions.Builder()
.setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE) .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
.setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE) .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_NONE)
.setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE) .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_NONE)
.setMinFaceSize(0.15f) .setMinFaceSize(0.10f) // Lower for better detection
.build() .build()
FaceDetection.getClient(options) FaceDetection.getClient(options)
} }
@@ -77,12 +76,14 @@ class PersonInventoryViewModel @Inject constructor(
val personName: String, val personName: String,
val progress: Int, val progress: Int,
val total: Int, val total: Int,
val facesFound: Int val facesFound: Int,
val facesDetected: Int = 0
) : ScanningState() ) : ScanningState()
data class Complete( data class Complete(
val personName: String, val personName: String,
val facesFound: Int, val facesFound: Int,
val imagesScanned: Int val imagesScanned: Int,
val totalFacesDetected: Int = 0
) : ScanningState() ) : ScanningState()
} }
@@ -90,9 +91,6 @@ class PersonInventoryViewModel @Inject constructor(
loadPersons() loadPersons()
} }
/**
* Load all trained persons with their stats
*/
fun loadPersons() { fun loadPersons() {
viewModelScope.launch { viewModelScope.launch {
try { try {
@@ -119,14 +117,11 @@ class PersonInventoryViewModel @Inject constructor(
} }
} }
/**
* Delete a face model
*/
fun deletePerson(personId: String, faceModelId: String) { fun deletePerson(personId: String, faceModelId: String) {
viewModelScope.launch { viewModelScope.launch {
try { try {
faceRecognitionRepository.deleteFaceModel(faceModelId) faceRecognitionRepository.deleteFaceModel(faceModelId)
loadPersons() // Refresh list loadPersons()
} catch (e: Exception) { } catch (e: Exception) {
_uiState.value = InventoryUiState.Error( _uiState.value = InventoryUiState.Error(
"Failed to delete: ${e.message}" "Failed to delete: ${e.message}"
@@ -136,21 +131,17 @@ class PersonInventoryViewModel @Inject constructor(
} }
/** /**
* Scan entire photo library for a specific person * Scan library with optional diagnostic logging
*
* Process:
* 1. Get all images from library
* 2. For each image:
* - Detect faces using ML Kit
* - Generate embeddings for detected faces
* - Compare to person's face model
* - Create PhotoFaceTagEntity if match found
* 3. Update progress throughout
*/ */
fun scanLibraryForPerson(personId: String, faceModelId: String) { fun scanLibraryForPerson(personId: String, faceModelId: String) {
viewModelScope.launch { viewModelScope.launch {
try { try {
// Get person name for UI if (DebugFlags.ENABLE_FACE_RECOGNITION_LOGGING) {
DiagnosticLogger.i("=== STARTING LIBRARY SCAN ===")
DiagnosticLogger.i("PersonId: $personId")
DiagnosticLogger.i("FaceModelId: $faceModelId")
}
val currentState = _uiState.value val currentState = _uiState.value
val person = if (currentState is InventoryUiState.Success) { val person = if (currentState is InventoryUiState.Success) {
currentState.persons.find { it.person.id == personId }?.person currentState.persons.find { it.person.id == personId }?.person
@@ -158,69 +149,104 @@ class PersonInventoryViewModel @Inject constructor(
val personName = person?.name ?: "Unknown" val personName = person?.name ?: "Unknown"
// Get all images from library // Get face model to determine training count
val faceModel = faceRecognitionRepository.getFaceModelById(faceModelId)
val trainingCount = faceModel?.trainingImageCount ?: 15
// Dynamic threshold based on training data and debug flag
val scanThreshold = if (DebugFlags.USE_LIBERAL_THRESHOLDS) {
when {
trainingCount < 20 -> 0.48f // Very liberal
trainingCount < 30 -> 0.52f // Liberal
else -> 0.58f // Moderate
}
} else {
when {
trainingCount < 20 -> 0.55f // Moderate
trainingCount < 30 -> 0.60f // Conservative
else -> 0.65f // Strict
}
}
DiagnosticLogger.i("Training count: $trainingCount")
DiagnosticLogger.i("Using threshold: $scanThreshold")
val allImages = imageRepository.getAllImages().first() val allImages = imageRepository.getAllImages().first()
val totalImages = allImages.size val totalImages = allImages.size
DiagnosticLogger.i("Total images in library: $totalImages")
_scanningState.value = ScanningState.Scanning( _scanningState.value = ScanningState.Scanning(
personId = personId, personId = personId,
personName = personName, personName = personName,
progress = 0, progress = 0,
total = totalImages, total = totalImages,
facesFound = 0 facesFound = 0,
facesDetected = 0
) )
var facesFound = 0 var facesFound = 0
var totalFacesDetected = 0
// Scan each image
allImages.forEachIndexed { index, imageWithEverything -> allImages.forEachIndexed { index, imageWithEverything ->
val image = imageWithEverything.image val image = imageWithEverything.image
// Detect faces in this image DiagnosticLogger.d("--- Image ${index + 1}/$totalImages ---")
DiagnosticLogger.d("ImageId: ${image.imageId}")
val detectedFaces = detectFacesInImage(image.imageUri) val detectedFaces = detectFacesInImage(image.imageUri)
totalFacesDetected += detectedFaces.size
DiagnosticLogger.d("Faces detected: ${detectedFaces.size}")
if (detectedFaces.isNotEmpty()) { if (detectedFaces.isNotEmpty()) {
// Scan this image for the person
val tags = faceRecognitionRepository.scanImage( val tags = faceRecognitionRepository.scanImage(
imageId = image.imageId, imageId = image.imageId,
detectedFaces = detectedFaces, detectedFaces = detectedFaces,
threshold = 0.6f // Slightly lower threshold for library scanning threshold = scanThreshold
) )
// Count how many faces matched this person DiagnosticLogger.d("Tags created: ${tags.size}")
val matchingTags = tags.filter { tag ->
// Check if this tag belongs to our target person's face model tags.forEach { tag ->
tag.faceModelId == faceModelId DiagnosticLogger.d(" Tag: model=${tag.faceModelId.take(8)}, conf=${String.format("%.3f", tag.confidence)}")
} }
val matchingTags = tags.filter { it.faceModelId == faceModelId }
DiagnosticLogger.d("Matching tags for target: ${matchingTags.size}")
facesFound += matchingTags.size facesFound += matchingTags.size
} }
// Update progress
_scanningState.value = ScanningState.Scanning( _scanningState.value = ScanningState.Scanning(
personId = personId, personId = personId,
personName = personName, personName = personName,
progress = index + 1, progress = index + 1,
total = totalImages, total = totalImages,
facesFound = facesFound facesFound = facesFound,
facesDetected = totalFacesDetected
) )
} }
// Scan complete DiagnosticLogger.i("=== SCAN COMPLETE ===")
DiagnosticLogger.i("Images scanned: $totalImages")
DiagnosticLogger.i("Faces detected: $totalFacesDetected")
DiagnosticLogger.i("Faces matched: $facesFound")
DiagnosticLogger.i("Hit rate: ${if (totalFacesDetected > 0) (facesFound * 100 / totalFacesDetected) else 0}%")
_scanningState.value = ScanningState.Complete( _scanningState.value = ScanningState.Complete(
personName = personName, personName = personName,
facesFound = facesFound, facesFound = facesFound,
imagesScanned = totalImages imagesScanned = totalImages,
totalFacesDetected = totalFacesDetected
) )
// Refresh the list to show updated counts
loadPersons() loadPersons()
// Reset scanning state after 3 seconds
delay(3000) delay(3000)
_scanningState.value = ScanningState.Idle _scanningState.value = ScanningState.Idle
} catch (e: Exception) { } catch (e: Exception) {
DiagnosticLogger.e("Scan failed", e)
_scanningState.value = ScanningState.Idle _scanningState.value = ScanningState.Idle
_uiState.value = InventoryUiState.Error( _uiState.value = InventoryUiState.Error(
"Scan failed: ${e.message}" "Scan failed: ${e.message}"
@@ -229,33 +255,28 @@ class PersonInventoryViewModel @Inject constructor(
} }
} }
/**
* Detect faces in an image using ML Kit
*
* @param imageUri URI of the image to scan
* @return List of detected faces with cropped bitmaps
*/
private suspend fun detectFacesInImage(imageUri: String): List<DetectedFace> = withContext(Dispatchers.Default) { private suspend fun detectFacesInImage(imageUri: String): List<DetectedFace> = withContext(Dispatchers.Default) {
try { try {
// Load bitmap from URI
val uri = Uri.parse(imageUri) val uri = Uri.parse(imageUri)
val inputStream = getApplication<Application>().contentResolver.openInputStream(uri) val inputStream = getApplication<Application>().contentResolver.openInputStream(uri)
val bitmap = BitmapFactory.decodeStream(inputStream) val bitmap = BitmapFactory.decodeStream(inputStream)
inputStream?.close() inputStream?.close()
if (bitmap == null) return@withContext emptyList() if (bitmap == null) {
DiagnosticLogger.w("Failed to load bitmap from: $imageUri")
return@withContext emptyList()
}
DiagnosticLogger.d("Bitmap: ${bitmap.width}x${bitmap.height}")
// Create ML Kit input image
val image = InputImage.fromBitmap(bitmap, 0) val image = InputImage.fromBitmap(bitmap, 0)
// Detect faces (await the Task)
val faces = faceDetector.process(image).await() val faces = faceDetector.process(image).await()
// Convert to DetectedFace objects DiagnosticLogger.d("ML Kit found ${faces.size} faces")
faces.mapNotNull { face -> faces.mapNotNull { face ->
val boundingBox = face.boundingBox val boundingBox = face.boundingBox
// Crop face from bitmap with bounds checking
val croppedFace = try { val croppedFace = try {
val left = boundingBox.left.coerceAtLeast(0) val left = boundingBox.left.coerceAtLeast(0)
val top = boundingBox.top.coerceAtLeast(0) val top = boundingBox.top.coerceAtLeast(0)
@@ -268,6 +289,7 @@ class PersonInventoryViewModel @Inject constructor(
null null
} }
} catch (e: Exception) { } catch (e: Exception) {
DiagnosticLogger.e("Face crop failed", e)
null null
} }
@@ -282,13 +304,11 @@ class PersonInventoryViewModel @Inject constructor(
} }
} catch (e: Exception) { } catch (e: Exception) {
DiagnosticLogger.e("Face detection failed: $imageUri", e)
emptyList() emptyList()
} }
} }
/**
* Get sample images for a person
*/
suspend fun getPersonImages(personId: String) = suspend fun getPersonImages(personId: String) =
faceRecognitionRepository.getImagesForPerson(personId) faceRecognitionRepository.getImagesForPerson(personId)

View File

@@ -3,127 +3,337 @@ package com.placeholder.sherpai2.ui.trainingprep
import android.net.Uri import android.net.Uri
import androidx.activity.compose.rememberLauncherForActivityResult import androidx.activity.compose.rememberLauncherForActivityResult
import androidx.activity.result.contract.ActivityResultContracts import androidx.activity.result.contract.ActivityResultContracts
import androidx.compose.animation.AnimatedVisibility
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.* import androidx.compose.foundation.layout.*
import androidx.compose.foundation.lazy.grid.GridCells import androidx.compose.foundation.lazy.grid.GridCells
import androidx.compose.foundation.lazy.grid.LazyVerticalGrid import androidx.compose.foundation.lazy.grid.LazyVerticalGrid
import androidx.compose.foundation.lazy.grid.items
import androidx.compose.foundation.shape.CircleShape
import androidx.compose.foundation.shape.RoundedCornerShape import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material.icons.Icons import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.AddPhotoAlternate import androidx.compose.material.icons.filled.*
import androidx.compose.material.icons.filled.Close
import androidx.compose.material3.* import androidx.compose.material3.*
import androidx.compose.runtime.* import androidx.compose.runtime.*
import androidx.compose.ui.Alignment import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier import androidx.compose.ui.Modifier
import androidx.compose.ui.layout.ContentScale import androidx.compose.ui.graphics.Brush
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.dp import androidx.compose.ui.unit.dp
import androidx.compose.material3.Text
import androidx.compose.runtime.saveable.rememberSaveable
import androidx.compose.ui.draw.clip
import androidx.compose.ui.platform.LocalContext
import coil.compose.AsyncImage
import androidx.compose.foundation.lazy.grid.items
/**
* Enhanced ImageSelectorScreen
*
* Changes:
* - NO LIMIT on photo count (was 10)
* - Recommends 20-30 photos
* - Real-time progress feedback
* - Quality indicators
* - Training tips
*/
@OptIn(ExperimentalMaterial3Api::class) @OptIn(ExperimentalMaterial3Api::class)
@Composable @Composable
fun ImageSelectorScreen( fun ImageSelectorScreen(
onImagesSelected: (List<Uri>) -> Unit onImagesSelected: (List<Uri>) -> Unit
) { ) {
//1. Persist state across configuration changes var selectedImages by remember { mutableStateOf<List<Uri>>(emptyList()) }
var selectedUris by rememberSaveable { mutableStateOf<List<Uri>>(emptyList()) }
val context = LocalContext.current
val launcher = rememberLauncherForActivityResult( val photoPicker = rememberLauncherForActivityResult(
ActivityResultContracts.OpenMultipleDocuments() contract = ActivityResultContracts.GetMultipleContents()
) { uris -> ) { uris ->
// 2. Take first 10 and try to persist permissions if (uris.isNotEmpty()) {
val limitedUris = uris.take(10) selectedImages = uris
selectedUris = limitedUris }
} }
Scaffold( Scaffold(
topBar = { TopAppBar(title = { Text("Select Training Photos") }) } topBar = {
) { padding -> TopAppBar(
title = { Text("Select Training Photos") },
colors = TopAppBarDefaults.topAppBarColors(
containerColor = MaterialTheme.colorScheme.primaryContainer
)
)
}
) { paddingValues ->
Column( Column(
modifier = Modifier modifier = Modifier
.padding(padding) .fillMaxSize()
.padding(16.dp) .padding(paddingValues)
.fillMaxSize(), .padding(16.dp),
verticalArrangement = Arrangement.spacedBy(16.dp) verticalArrangement = Arrangement.spacedBy(16.dp)
) { ) {
OutlinedCard(
onClick = { launcher.launch(arrayOf("image/*")) }, // Gradient header with tips
modifier = Modifier.fillMaxWidth() Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.primaryContainer
),
shape = RoundedCornerShape(16.dp)
) { ) {
Column( Column(
modifier = Modifier.padding(24.dp), modifier = Modifier.padding(20.dp),
horizontalAlignment = Alignment.CenterHorizontally verticalArrangement = Arrangement.spacedBy(12.dp)
) { ) {
Icon(Icons.Default.AddPhotoAlternate, contentDescription = null) Row(
Spacer(Modifier.height(8.dp)) horizontalArrangement = Arrangement.spacedBy(12.dp),
Text("Select up to 10 images of the person") verticalAlignment = Alignment.CenterVertically
Text( ) {
text = "${selectedUris.size} / 10 selected", Surface(
style = MaterialTheme.typography.labelLarge, shape = RoundedCornerShape(12.dp),
color = if (selectedUris.size == 10) MaterialTheme.colorScheme.error color = MaterialTheme.colorScheme.primary,
else if (selectedUris.isNotEmpty()) MaterialTheme.colorScheme.primary modifier = Modifier.size(48.dp)
else MaterialTheme.colorScheme.outline ) {
Box(contentAlignment = Alignment.Center) {
Icon(
Icons.Default.PhotoCamera,
contentDescription = null,
tint = MaterialTheme.colorScheme.onPrimary,
modifier = Modifier.size(28.dp)
) )
} }
} }
// 3. Conditional rendering for empty state Column {
if (selectedUris.isEmpty()) { Text(
Box(Modifier "Training Tips",
.weight(1f) style = MaterialTheme.typography.titleLarge,
.fillMaxWidth(), contentAlignment = Alignment.Center) { fontWeight = FontWeight.Bold
Text("No images selected", style = MaterialTheme.typography.bodyMedium)
}
} else {
LazyVerticalGrid(
columns = GridCells.Fixed(3),
modifier = Modifier.weight(1f),
contentPadding = PaddingValues(4.dp)
) {
items(selectedUris, key = { it.toString() }) { uri ->
Box(modifier = Modifier.padding(4.dp)) {
AsyncImage(
model = uri,
contentDescription = null,
modifier = Modifier
.aspectRatio(1f)
.clip(RoundedCornerShape(8.dp)),
contentScale = ContentScale.Crop
) )
// 4. Ability to remove specific images Text(
Surface( "More photos = better recognition",
onClick = { selectedUris = selectedUris - uri }, style = MaterialTheme.typography.bodyMedium,
modifier = Modifier color = MaterialTheme.colorScheme.onPrimaryContainer.copy(alpha = 0.7f)
.align(Alignment.TopEnd) )
.padding(4.dp), }
shape = CircleShape, }
color = MaterialTheme.colorScheme.surfaceVariant.copy(alpha = 0.8f)
Spacer(Modifier.height(4.dp))
TipItem("✓ Select 20-30 photos for best results", true)
TipItem("✓ Include different angles and lighting", true)
TipItem("✓ Mix expressions (smile, neutral, laugh)", true)
TipItem("✓ With/without glasses if applicable", true)
TipItem("✗ Avoid blurry or very dark photos", false)
}
}
// Progress indicator
AnimatedVisibility(selectedImages.isNotEmpty()) {
ProgressCard(selectedImages.size)
}
Spacer(Modifier.weight(1f))
// Select photos button
Button(
onClick = { photoPicker.launch("image/*") },
modifier = Modifier.fillMaxWidth(),
colors = ButtonDefaults.buttonColors(
containerColor = MaterialTheme.colorScheme.primary
),
contentPadding = PaddingValues(vertical = 16.dp)
) {
Icon(Icons.Default.PhotoLibrary, contentDescription = null)
Spacer(Modifier.width(8.dp))
Text(
if (selectedImages.isEmpty()) {
"Select Training Photos"
} else {
"Selected: ${selectedImages.size} photos - Tap to change"
},
style = MaterialTheme.typography.titleMedium
)
}
// Continue button
AnimatedVisibility(selectedImages.size >= 15) {
Button(
onClick = { onImagesSelected(selectedImages) },
modifier = Modifier.fillMaxWidth(),
colors = ButtonDefaults.buttonColors(
containerColor = MaterialTheme.colorScheme.secondary
),
contentPadding = PaddingValues(vertical = 16.dp)
) {
Icon(Icons.Default.Check, contentDescription = null)
Spacer(Modifier.width(8.dp))
Text(
"Continue with ${selectedImages.size} photos",
style = MaterialTheme.typography.titleMedium
)
}
}
// Minimum warning
if (selectedImages.isNotEmpty() && selectedImages.size < 15) {
Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.errorContainer
)
) {
Row(
modifier = Modifier.padding(16.dp),
horizontalArrangement = Arrangement.spacedBy(12.dp),
verticalAlignment = Alignment.CenterVertically
) { ) {
Icon( Icon(
Icons.Default.Close, Icons.Default.Warning,
contentDescription = "Remove", contentDescription = null,
modifier = Modifier.size(16.dp) tint = MaterialTheme.colorScheme.error
)
Column {
Text(
"Need at least 15 photos",
style = MaterialTheme.typography.titleSmall,
fontWeight = FontWeight.Bold,
color = MaterialTheme.colorScheme.onErrorContainer
)
Text(
"You have ${selectedImages.size}. Select ${15 - selectedImages.size} more.",
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onErrorContainer.copy(alpha = 0.8f)
) )
} }
} }
} }
} }
} }
}
}
Button( @Composable
modifier = Modifier.fillMaxWidth(), private fun TipItem(text: String, isGood: Boolean) {
enabled = selectedUris.isNotEmpty(), Row(
onClick = { onImagesSelected(selectedUris) } horizontalArrangement = Arrangement.spacedBy(8.dp),
verticalAlignment = Alignment.Top
) { ) {
Text("Start Face Detection") Icon(
if (isGood) Icons.Default.CheckCircle else Icons.Default.Cancel,
contentDescription = null,
modifier = Modifier.size(18.dp),
tint = if (isGood) {
MaterialTheme.colorScheme.primary
} else {
MaterialTheme.colorScheme.error
}
)
Text(
text = text,
style = MaterialTheme.typography.bodyMedium,
color = MaterialTheme.colorScheme.onPrimaryContainer
)
}
}
@Composable
private fun ProgressCard(photoCount: Int) {
Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = when {
photoCount >= 25 -> MaterialTheme.colorScheme.primaryContainer
photoCount >= 20 -> MaterialTheme.colorScheme.tertiaryContainer
else -> MaterialTheme.colorScheme.surfaceVariant
}
),
shape = RoundedCornerShape(16.dp)
) {
Column(
modifier = Modifier.padding(20.dp),
verticalArrangement = Arrangement.spacedBy(12.dp)
) {
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween,
verticalAlignment = Alignment.CenterVertically
) {
Column {
Text(
text = "$photoCount photos selected",
style = MaterialTheme.typography.titleMedium,
fontWeight = FontWeight.Bold
)
Text(
text = when {
photoCount >= 30 -> "Excellent! Maximum diversity"
photoCount >= 25 -> "Great! Very good coverage"
photoCount >= 20 -> "Good! Should work well"
photoCount >= 15 -> "Acceptable - more is better"
else -> "Need ${15 - photoCount} more"
},
style = MaterialTheme.typography.bodyMedium,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
}
Surface(
shape = RoundedCornerShape(12.dp),
color = when {
photoCount >= 25 -> MaterialTheme.colorScheme.primary
photoCount >= 20 -> MaterialTheme.colorScheme.tertiary
photoCount >= 15 -> MaterialTheme.colorScheme.secondary
else -> MaterialTheme.colorScheme.outline
},
modifier = Modifier.size(56.dp)
) {
Box(contentAlignment = Alignment.Center) {
Text(
text = when {
photoCount >= 25 -> ""
photoCount >= 20 -> ""
photoCount >= 15 -> ""
else -> "..."
},
style = MaterialTheme.typography.headlineMedium,
color = Color.White
)
}
}
}
// Progress bar
LinearProgressIndicator(
progress = { (photoCount / 30f).coerceAtMost(1f) },
modifier = Modifier
.fillMaxWidth()
.height(8.dp),
color = when {
photoCount >= 25 -> MaterialTheme.colorScheme.primary
photoCount >= 20 -> MaterialTheme.colorScheme.tertiary
else -> MaterialTheme.colorScheme.secondary
},
trackColor = MaterialTheme.colorScheme.surfaceVariant,
)
// Expected accuracy
Row(
modifier = Modifier.fillMaxWidth(),
horizontalArrangement = Arrangement.SpaceBetween
) {
Text(
"Expected accuracy:",
style = MaterialTheme.typography.labelMedium,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
Text(
when {
photoCount >= 30 -> "90-95%"
photoCount >= 25 -> "85-90%"
photoCount >= 20 -> "80-85%"
photoCount >= 15 -> "75-80%"
else -> "< 75%"
},
style = MaterialTheme.typography.labelLarge,
fontWeight = FontWeight.Bold,
color = when {
photoCount >= 25 -> MaterialTheme.colorScheme.primary
photoCount >= 20 -> MaterialTheme.colorScheme.tertiary
else -> MaterialTheme.colorScheme.secondary
}
)
} }
} }
} }

View File

@@ -5,6 +5,7 @@ import android.graphics.Bitmap
import android.net.Uri import android.net.Uri
import androidx.lifecycle.AndroidViewModel import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope import androidx.lifecycle.viewModelScope
import com.placeholder.sherpai2.data.local.entity.PersonEntity
import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository import com.placeholder.sherpai2.data.repository.FaceRecognitionRepository
import com.placeholder.sherpai2.ml.FaceNetModel import com.placeholder.sherpai2.ml.FaceNetModel
import dagger.hilt.android.lifecycle.HiltViewModel import dagger.hilt.android.lifecycle.HiltViewModel
@@ -14,9 +15,6 @@ import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.launch import kotlinx.coroutines.launch
import javax.inject.Inject import javax.inject.Inject
/**
* State for image scanning and validation
*/
sealed class ScanningState { sealed class ScanningState {
object Idle : ScanningState() object Idle : ScanningState()
data class Processing(val progress: Int, val total: Int) : ScanningState() data class Processing(val progress: Int, val total: Int) : ScanningState()
@@ -26,26 +24,26 @@ sealed class ScanningState {
data class Error(val message: String) : ScanningState() data class Error(val message: String) : ScanningState()
} }
/**
* State for face model training/creation
*/
sealed class TrainingState { sealed class TrainingState {
object Idle : TrainingState() object Idle : TrainingState()
data class Processing(val stage: String, val progress: Int, val total: Int) : TrainingState() data class Processing(val stage: String, val progress: Int, val total: Int) : TrainingState()
data class Success(val personName: String, val personId: String) : TrainingState() data class Success(
val personName: String,
val personId: String,
val relationship: String?
) : TrainingState()
data class Error(val message: String) : TrainingState() data class Error(val message: String) : TrainingState()
} }
/** /**
* ViewModel for training face recognition models * Person info captured before photo selection
*
* WORKFLOW:
* 1. User selects 10+ images → scanAndTagFaces()
* 2. Images validated → Success state with validImagesWithFaces
* 3. User can replace images or pick faces from group photos
* 4. When ready → createFaceModel(personName)
* 5. Creates PersonEntity + FaceModelEntity in database
*/ */
data class PersonInfo(
val name: String,
val dateOfBirth: Long?,
val relationship: String
)
@HiltViewModel @HiltViewModel
class TrainViewModel @Inject constructor( class TrainViewModel @Inject constructor(
application: Application, application: Application,
@@ -56,18 +54,16 @@ class TrainViewModel @Inject constructor(
private val sanityChecker = TrainingSanityChecker(application) private val sanityChecker = TrainingSanityChecker(application)
private val faceDetectionHelper = FaceDetectionHelper(application) private val faceDetectionHelper = FaceDetectionHelper(application)
// Scanning/validation state
private val _uiState = MutableStateFlow<ScanningState>(ScanningState.Idle) private val _uiState = MutableStateFlow<ScanningState>(ScanningState.Idle)
val uiState: StateFlow<ScanningState> = _uiState.asStateFlow() val uiState: StateFlow<ScanningState> = _uiState.asStateFlow()
// Training/model creation state
private val _trainingState = MutableStateFlow<TrainingState>(TrainingState.Idle) private val _trainingState = MutableStateFlow<TrainingState>(TrainingState.Idle)
val trainingState: StateFlow<TrainingState> = _trainingState.asStateFlow() val trainingState: StateFlow<TrainingState> = _trainingState.asStateFlow()
// Keep track of current images for replacements // Store person info for later use during training
private var currentImageUris: List<Uri> = emptyList() private var personInfo: PersonInfo? = null
// Keep track of manual face selections (imageUri -> selectedFaceIndex) private var currentImageUris: List<Uri> = emptyList()
private val manualFaceSelections = mutableMapOf<Uri, ManualFaceSelection>() private val manualFaceSelections = mutableMapOf<Uri, ManualFaceSelection>()
data class ManualFaceSelection( data class ManualFaceSelection(
@@ -75,28 +71,15 @@ class TrainViewModel @Inject constructor(
val croppedFaceBitmap: Bitmap val croppedFaceBitmap: Bitmap
) )
// ====================== /**
// FACE MODEL CREATION * Store person info before photo selection
// ====================== */
fun setPersonInfo(name: String, dateOfBirth: Long?, relationship: String) {
personInfo = PersonInfo(name, dateOfBirth, relationship)
}
/** /**
* Create face model from validated training images. * Create face model with captured person info
*
* COMPLETE PROCESS:
* 1. Verify we have 10+ validated images
* 2. Call repository to create PersonEntity + FaceModelEntity
* 3. Repository handles: embedding generation, averaging, database save
*
* Call this when user clicks "Continue to Training" after validation passes.
*
* @param personName Name for the new person
*
* EXAMPLE USAGE IN UI:
* if (result.isValid) {
* showNameDialog { name ->
* trainViewModel.createFaceModel(name)
* }
* }
*/ */
fun createFaceModel(personName: String) { fun createFaceModel(personName: String) {
val currentState = _uiState.value val currentState = _uiState.value
@@ -106,8 +89,10 @@ class TrainViewModel @Inject constructor(
} }
val validImages = currentState.sanityCheckResult.validImagesWithFaces val validImages = currentState.sanityCheckResult.validImagesWithFaces
if (validImages.size < 10) { if (validImages.size < 15) { // Updated minimum
_trainingState.value = TrainingState.Error("Need at least 10 valid images, have ${validImages.size}") _trainingState.value = TrainingState.Error(
"Need at least 15 valid images, have ${validImages.size}"
)
return return
} }
@@ -119,13 +104,16 @@ class TrainViewModel @Inject constructor(
total = validImages.size total = validImages.size
) )
// Repository handles everything: // Create person with captured info
// - Creates PersonEntity in 'persons' table val person = PersonEntity.create(
// - Generates embeddings from face bitmaps name = personName,
// - Averages embeddings dateOfBirth = personInfo?.dateOfBirth,
// - Creates FaceModelEntity linked to PersonEntity relationship = personInfo?.relationship
)
// Create person with face model
val personId = faceRecognitionRepository.createPersonWithFaceModel( val personId = faceRecognitionRepository.createPersonWithFaceModel(
personName = personName, person = person, // Pass full PersonEntity now
validImages = validImages, validImages = validImages,
onProgress = { current, total -> onProgress = { current, total ->
_trainingState.value = TrainingState.Processing( _trainingState.value = TrainingState.Processing(
@@ -138,7 +126,8 @@ class TrainViewModel @Inject constructor(
_trainingState.value = TrainingState.Success( _trainingState.value = TrainingState.Success(
personName = personName, personName = personName,
personId = personId personId = personId,
relationship = person.relationship
) )
} catch (e: Exception) { } catch (e: Exception) {
@@ -149,40 +138,16 @@ class TrainViewModel @Inject constructor(
} }
} }
/**
* Reset training state back to idle.
* Call this after handling success/error.
*/
fun resetTrainingState() { fun resetTrainingState() {
_trainingState.value = TrainingState.Idle _trainingState.value = TrainingState.Idle
} }
// ======================
// IMAGE VALIDATION
// ======================
/**
* Scan and validate images for training.
*
* PROCESS:
* 1. Face detection on all images
* 2. Duplicate checking
* 3. Validation against requirements (10+ images, one face per image)
*
* @param imageUris List of image URIs selected by user
*/
fun scanAndTagFaces(imageUris: List<Uri>) { fun scanAndTagFaces(imageUris: List<Uri>) {
currentImageUris = imageUris currentImageUris = imageUris
manualFaceSelections.clear() manualFaceSelections.clear()
performScan(imageUris) performScan(imageUris)
} }
/**
* Replace a single image and re-scan all images.
*
* @param oldUri Image to replace
* @param newUri New image
*/
fun replaceImage(oldUri: Uri, newUri: Uri) { fun replaceImage(oldUri: Uri, newUri: Uri) {
viewModelScope.launch { viewModelScope.launch {
val updatedUris = currentImageUris.toMutableList() val updatedUris = currentImageUris.toMutableList()
@@ -191,27 +156,15 @@ class TrainViewModel @Inject constructor(
if (index != -1) { if (index != -1) {
updatedUris[index] = newUri updatedUris[index] = newUri
currentImageUris = updatedUris currentImageUris = updatedUris
// Remove manual selection for old URI if any
manualFaceSelections.remove(oldUri) manualFaceSelections.remove(oldUri)
// Re-scan all images
performScan(currentImageUris) performScan(currentImageUris)
} }
} }
} }
/**
* User manually selected a face from a multi-face image.
*
* @param imageUri Image with multiple faces
* @param faceIndex Which face the user selected (0-based)
* @param croppedFaceBitmap Cropped face bitmap
*/
fun selectFaceFromImage(imageUri: Uri, faceIndex: Int, croppedFaceBitmap: Bitmap) { fun selectFaceFromImage(imageUri: Uri, faceIndex: Int, croppedFaceBitmap: Bitmap) {
manualFaceSelections[imageUri] = ManualFaceSelection(faceIndex, croppedFaceBitmap) manualFaceSelections[imageUri] = ManualFaceSelection(faceIndex, croppedFaceBitmap)
// Re-process the results with the manual selection
val currentState = _uiState.value val currentState = _uiState.value
if (currentState is ScanningState.Success) { if (currentState is ScanningState.Success) {
val updatedResult = applyManualSelections(currentState.sanityCheckResult) val updatedResult = applyManualSelections(currentState.sanityCheckResult)
@@ -219,25 +172,19 @@ class TrainViewModel @Inject constructor(
} }
} }
/**
* Perform the actual scanning.
*/
private fun performScan(imageUris: List<Uri>) { private fun performScan(imageUris: List<Uri>) {
viewModelScope.launch { viewModelScope.launch {
try { try {
_uiState.value = ScanningState.Processing(0, imageUris.size) _uiState.value = ScanningState.Processing(0, imageUris.size)
// Perform sanity checks
val result = sanityChecker.performSanityChecks( val result = sanityChecker.performSanityChecks(
imageUris = imageUris, imageUris = imageUris,
minImagesRequired = 10, minImagesRequired = 15, // Updated minimum
allowMultipleFaces = true, // Allow multiple faces - user can pick allowMultipleFaces = true,
duplicateSimilarityThreshold = 0.95 duplicateSimilarityThreshold = 0.95
) )
// Apply any manual face selections
val finalResult = applyManualSelections(result) val finalResult = applyManualSelections(result)
_uiState.value = ScanningState.Success(finalResult) _uiState.value = ScanningState.Success(finalResult)
} catch (e: Exception) { } catch (e: Exception) {
@@ -248,26 +195,19 @@ class TrainViewModel @Inject constructor(
} }
} }
/**
* Apply manual face selections to the results.
*/
private fun applyManualSelections( private fun applyManualSelections(
result: TrainingSanityChecker.SanityCheckResult result: TrainingSanityChecker.SanityCheckResult
): TrainingSanityChecker.SanityCheckResult { ): TrainingSanityChecker.SanityCheckResult {
// If no manual selections, return original
if (manualFaceSelections.isEmpty()) { if (manualFaceSelections.isEmpty()) {
return result return result
} }
// Update face detection results with manual selections
val updatedFaceResults = result.faceDetectionResults.map { faceResult -> val updatedFaceResults = result.faceDetectionResults.map { faceResult ->
val manualSelection = manualFaceSelections[faceResult.uri] val manualSelection = manualFaceSelections[faceResult.uri]
if (manualSelection != null) { if (manualSelection != null) {
// Replace the cropped face with the manually selected one
faceResult.copy( faceResult.copy(
croppedFaceBitmap = manualSelection.croppedFaceBitmap, croppedFaceBitmap = manualSelection.croppedFaceBitmap,
// Treat as single face since user selected one
faceCount = 1 faceCount = 1
) )
} else { } else {
@@ -275,12 +215,11 @@ class TrainViewModel @Inject constructor(
} }
} }
// Update valid images list
val updatedValidImages = updatedFaceResults val updatedValidImages = updatedFaceResults
.filter { it.hasFace } .filter { it.hasFace }
.filter { it.croppedFaceBitmap != null } .filter { it.croppedFaceBitmap != null }
.filter { it.errorMessage == null } .filter { it.errorMessage == null }
.filter { it.faceCount >= 1 } // Now accept if user picked a face .filter { it.faceCount >= 1 }
.map { result -> .map { result ->
TrainingSanityChecker.ValidTrainingImage( TrainingSanityChecker.ValidTrainingImage(
uri = result.uri, uri = result.uri,
@@ -289,31 +228,27 @@ class TrainViewModel @Inject constructor(
) )
} }
// Recalculate validation errors
val updatedErrors = result.validationErrors.toMutableList() val updatedErrors = result.validationErrors.toMutableList()
// Remove multiple face errors for images with manual selections
updatedErrors.removeAll { error -> updatedErrors.removeAll { error ->
error is TrainingSanityChecker.ValidationError.MultipleFacesDetected && error is TrainingSanityChecker.ValidationError.MultipleFacesDetected &&
manualFaceSelections.containsKey(error.uri) manualFaceSelections.containsKey(error.uri)
} }
// Check if we have enough valid images now if (updatedValidImages.size < 15) { // Updated minimum
if (updatedValidImages.size < 10) {
if (updatedErrors.none { it is TrainingSanityChecker.ValidationError.InsufficientImages }) { if (updatedErrors.none { it is TrainingSanityChecker.ValidationError.InsufficientImages }) {
updatedErrors.add( updatedErrors.add(
TrainingSanityChecker.ValidationError.InsufficientImages( TrainingSanityChecker.ValidationError.InsufficientImages(
required = 10, required = 15,
available = updatedValidImages.size available = updatedValidImages.size
) )
) )
} }
} else { } else {
// Remove insufficient images error if we now have enough
updatedErrors.removeAll { it is TrainingSanityChecker.ValidationError.InsufficientImages } updatedErrors.removeAll { it is TrainingSanityChecker.ValidationError.InsufficientImages }
} }
val isValid = updatedErrors.isEmpty() && updatedValidImages.size >= 10 val isValid = updatedErrors.isEmpty() && updatedValidImages.size >= 15
return result.copy( return result.copy(
isValid = isValid, isValid = isValid,
@@ -323,21 +258,16 @@ class TrainViewModel @Inject constructor(
) )
} }
/**
* Get formatted error messages.
*/
fun getFormattedErrors(result: TrainingSanityChecker.SanityCheckResult): List<String> { fun getFormattedErrors(result: TrainingSanityChecker.SanityCheckResult): List<String> {
return sanityChecker.formatValidationErrors(result.validationErrors) return sanityChecker.formatValidationErrors(result.validationErrors)
} }
/**
* Reset to idle state.
*/
fun reset() { fun reset() {
_uiState.value = ScanningState.Idle _uiState.value = ScanningState.Idle
_trainingState.value = TrainingState.Idle _trainingState.value = TrainingState.Idle
currentImageUris = emptyList() currentImageUris = emptyList()
manualFaceSelections.clear() manualFaceSelections.clear()
personInfo = null
} }
override fun onCleared() { override fun onCleared() {
@@ -348,13 +278,7 @@ class TrainViewModel @Inject constructor(
} }
} }
// ====================== // Extension functions for copying results
// EXTENSION FUNCTIONS
// ======================
/**
* Extension to copy FaceDetectionResult with modifications.
*/
private fun FaceDetectionHelper.FaceDetectionResult.copy( private fun FaceDetectionHelper.FaceDetectionResult.copy(
uri: Uri = this.uri, uri: Uri = this.uri,
hasFace: Boolean = this.hasFace, hasFace: Boolean = this.hasFace,
@@ -373,9 +297,6 @@ private fun FaceDetectionHelper.FaceDetectionResult.copy(
) )
} }
/**
* Extension to copy SanityCheckResult with modifications.
*/
private fun TrainingSanityChecker.SanityCheckResult.copy( private fun TrainingSanityChecker.SanityCheckResult.copy(
isValid: Boolean = this.isValid, isValid: Boolean = this.isValid,
faceDetectionResults: List<FaceDetectionHelper.FaceDetectionResult> = this.faceDetectionResults, faceDetectionResults: List<FaceDetectionHelper.FaceDetectionResult> = this.faceDetectionResults,

View File

@@ -1,31 +1,516 @@
package com.placeholder.sherpai2.ui.trainingprep package com.placeholder.sherpai2.ui.trainingprep
import androidx.compose.foundation.layout.padding import androidx.compose.animation.AnimatedVisibility
import androidx.compose.material3.Button import androidx.compose.foundation.background
import androidx.compose.material3.ExperimentalMaterial3Api import androidx.compose.foundation.layout.*
import androidx.compose.material3.Scaffold import androidx.compose.foundation.rememberScrollState
import androidx.compose.material3.Text import androidx.compose.foundation.shape.RoundedCornerShape
import androidx.compose.material3.TopAppBar import androidx.compose.foundation.verticalScroll
import androidx.compose.runtime.Composable import androidx.compose.material.icons.Icons
import androidx.compose.material.icons.filled.*
import androidx.compose.material3.*
import androidx.compose.runtime.*
import androidx.compose.ui.Alignment
import androidx.compose.ui.Modifier import androidx.compose.ui.Modifier
import androidx.hilt.lifecycle.viewmodel.compose.hiltViewModel import androidx.compose.ui.graphics.Brush
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.style.TextAlign
import androidx.compose.ui.unit.dp
import java.text.SimpleDateFormat
import java.util.*
/**
* Beautiful TrainingScreen with person info capture
*
* Features:
* - Name input
* - Date of birth picker
* - Relationship selector
* - Onboarding cards
* - Beautiful gradient design
* - Clear call to action
*/
@OptIn(ExperimentalMaterial3Api::class) @OptIn(ExperimentalMaterial3Api::class)
@Composable @Composable
fun TrainingScreen( fun TrainingScreen(
onSelectImages: () -> Unit onSelectImages: () -> Unit,
modifier: Modifier = Modifier
) { ) {
var showInfoDialog by remember { mutableStateOf(false) }
Scaffold( Scaffold(
topBar = { topBar = {
TopAppBar( TopAppBar(
title = { Text("Training") } title = { Text("Train New Person") },
colors = TopAppBarDefaults.topAppBarColors(
containerColor = MaterialTheme.colorScheme.primaryContainer
)
) )
} }
) { padding -> ) { paddingValues ->
Button( Column(
modifier = Modifier.padding(padding), modifier = modifier
onClick = onSelectImages .fillMaxSize()
.padding(paddingValues)
.verticalScroll(rememberScrollState())
.padding(20.dp),
verticalArrangement = Arrangement.spacedBy(20.dp)
) { ) {
Text("Select Images")
// Hero section with gradient
HeroCard()
// How it works section
HowItWorksSection()
// Requirements section
RequirementsCard()
Spacer(Modifier.weight(1f))
// Main CTA button
Button(
onClick = { showInfoDialog = true },
modifier = Modifier
.fillMaxWidth()
.height(60.dp),
colors = ButtonDefaults.buttonColors(
containerColor = MaterialTheme.colorScheme.primary
),
shape = RoundedCornerShape(16.dp)
) {
Icon(
Icons.Default.PersonAdd,
contentDescription = null,
modifier = Modifier.size(24.dp)
)
Spacer(Modifier.width(12.dp))
Text(
"Start Training",
style = MaterialTheme.typography.titleLarge,
fontWeight = FontWeight.Bold
)
}
Spacer(Modifier.height(8.dp))
}
}
// Person info dialog
if (showInfoDialog) {
PersonInfoDialog(
onDismiss = { showInfoDialog = false },
onConfirm = { name, dob, relationship ->
showInfoDialog = false
// TODO: Store this info before photo selection
// For now, just proceed to photo selection
onSelectImages()
}
)
}
}
@Composable
private fun HeroCard() {
Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.primaryContainer
),
shape = RoundedCornerShape(20.dp)
) {
Box(
modifier = Modifier
.fillMaxWidth()
.background(
Brush.verticalGradient(
colors = listOf(
MaterialTheme.colorScheme.primaryContainer,
MaterialTheme.colorScheme.primaryContainer.copy(alpha = 0.7f)
)
)
)
) {
Column(
modifier = Modifier.padding(24.dp),
horizontalAlignment = Alignment.CenterHorizontally,
verticalArrangement = Arrangement.spacedBy(16.dp)
) {
Surface(
shape = RoundedCornerShape(20.dp),
color = MaterialTheme.colorScheme.primary,
shadowElevation = 8.dp,
modifier = Modifier.size(80.dp)
) {
Box(contentAlignment = Alignment.Center) {
Icon(
Icons.Default.Face,
contentDescription = null,
modifier = Modifier.size(48.dp),
tint = MaterialTheme.colorScheme.onPrimary
)
}
}
Text(
"Face Recognition Training",
style = MaterialTheme.typography.headlineMedium,
fontWeight = FontWeight.Bold,
textAlign = TextAlign.Center
)
Text(
"Train the AI to recognize someone in your photos",
style = MaterialTheme.typography.bodyLarge,
textAlign = TextAlign.Center,
color = MaterialTheme.colorScheme.onPrimaryContainer.copy(alpha = 0.8f)
)
} }
} }
} }
}
@Composable
private fun HowItWorksSection() {
Column(verticalArrangement = Arrangement.spacedBy(12.dp)) {
Text(
"How It Works",
style = MaterialTheme.typography.titleLarge,
fontWeight = FontWeight.Bold
)
StepCard(
number = 1,
icon = Icons.Default.Info,
title = "Enter Person Details",
description = "Name, birthday, and relationship"
)
StepCard(
number = 2,
icon = Icons.Default.PhotoLibrary,
title = "Select Training Photos",
description = "Choose 20-30 photos of the person"
)
StepCard(
number = 3,
icon = Icons.Default.ModelTraining,
title = "AI Learns Their Face",
description = "Takes ~30 seconds to train"
)
StepCard(
number = 4,
icon = Icons.Default.Search,
title = "Auto-Tag Your Library",
description = "Find them in all your photos"
)
}
}
@Composable
private fun StepCard(
number: Int,
icon: androidx.compose.ui.graphics.vector.ImageVector,
title: String,
description: String
) {
Card(
modifier = Modifier.fillMaxWidth(),
colors = CardDefaults.cardColors(
containerColor = MaterialTheme.colorScheme.surfaceVariant
),
shape = RoundedCornerShape(12.dp)
) {
Row(
modifier = Modifier.padding(16.dp),
horizontalArrangement = Arrangement.spacedBy(16.dp),
verticalAlignment = Alignment.CenterVertically
) {
// Number badge
Surface(
shape = RoundedCornerShape(12.dp),
color = MaterialTheme.colorScheme.primary,
modifier = Modifier.size(48.dp)
) {
Box(contentAlignment = Alignment.Center) {
Text(
text = number.toString(),
style = MaterialTheme.typography.titleLarge,
fontWeight = FontWeight.Bold,
color = MaterialTheme.colorScheme.onPrimary
)
}
}
Column(modifier = Modifier.weight(1f)) {
Row(
horizontalArrangement = Arrangement.spacedBy(8.dp),
verticalAlignment = Alignment.CenterVertically
) {
Icon(
icon,
contentDescription = null,
modifier = Modifier.size(20.dp),
tint = MaterialTheme.colorScheme.primary
)
Text(
title,
style = MaterialTheme.typography.titleMedium,
fontWeight = FontWeight.SemiBold
)
}
Spacer(Modifier.height(4.dp))
Text(
description,
style = MaterialTheme.typography.bodyMedium,
color = MaterialTheme.colorScheme.onSurfaceVariant
)
}
}
}
}
/**
 * Card listing what the user needs before starting face training.
 * All items are rendered as already-met requirements (checkmarks).
 */
@Composable
private fun RequirementsCard() {
    // Fixed checklist content; every entry is shown with isMet = true.
    val requirements = listOf(
        "20-30 photos of the person",
        "Different angles and lighting",
        "Clear face visibility",
        "Mix of expressions",
        "2-3 minutes of your time"
    )
    Card(
        modifier = Modifier.fillMaxWidth(),
        colors = CardDefaults.cardColors(
            containerColor = MaterialTheme.colorScheme.secondaryContainer
        ),
        shape = RoundedCornerShape(16.dp)
    ) {
        Column(
            modifier = Modifier.padding(20.dp),
            verticalArrangement = Arrangement.spacedBy(12.dp)
        ) {
            // Header row: check icon + section title.
            Row(
                horizontalArrangement = Arrangement.spacedBy(8.dp),
                verticalAlignment = Alignment.CenterVertically
            ) {
                Icon(
                    Icons.Default.CheckCircle,
                    contentDescription = null,
                    tint = MaterialTheme.colorScheme.primary
                )
                Text(
                    "What You'll Need",
                    style = MaterialTheme.typography.titleMedium,
                    fontWeight = FontWeight.Bold
                )
            }
            requirements.forEach { requirement ->
                RequirementItem(requirement, true)
            }
        }
    }
}
/**
 * Single checklist row: a check (primary tint) when [isMet] is true,
 * otherwise a close icon (error tint), followed by the requirement text.
 */
@Composable
private fun RequirementItem(text: String, isMet: Boolean) {
    // Resolve icon and tint up front so the layout below stays flat.
    val markerIcon = if (isMet) Icons.Default.Check else Icons.Default.Close
    val markerTint =
        if (isMet) MaterialTheme.colorScheme.primary else MaterialTheme.colorScheme.error
    Row(
        horizontalArrangement = Arrangement.spacedBy(8.dp),
        verticalAlignment = Alignment.CenterVertically
    ) {
        Icon(
            markerIcon,
            contentDescription = null,
            modifier = Modifier.size(18.dp),
            tint = markerTint
        )
        Text(
            text = text,
            style = MaterialTheme.typography.bodyMedium
        )
    }
}
/**
 * Dialog collecting the person's details before training.
 *
 * Gathers a required name, an optional date of birth (via a Material3
 * date picker), and a relationship category chosen from filter chips.
 *
 * @param onDismiss invoked when the dialog is cancelled/dismissed.
 * @param onConfirm invoked with (name, dateOfBirth?, relationship) once a
 *   non-blank name has been entered and "Continue" is pressed.
 */
@OptIn(ExperimentalMaterial3Api::class)
@Composable
private fun PersonInfoDialog(
    onDismiss: () -> Unit,
    onConfirm: (name: String, dateOfBirth: Long?, relationship: String) -> Unit
) {
    var name by remember { mutableStateOf("") }
    var dateOfBirth by remember { mutableStateOf<Long?>(null) }
    var selectedRelationship by remember { mutableStateOf("Other") }
    var showDatePicker by remember { mutableStateOf(false) }
    // Label/emoji pairs; rendered as two rows of four chips below.
    val relationships = listOf(
        "Family" to "👨‍👩‍👧‍👦",
        "Friend" to "🤝",
        "Partner" to "❤️",
        "Child" to "👶",
        "Parent" to "👪",
        "Sibling" to "👫",
        "Colleague" to "💼",
        "Other" to "👤"
    )
    AlertDialog(
        onDismissRequest = onDismiss,
        title = {
            Column {
                Text("Person Details")
                Text(
                    "Help us organize your photos",
                    style = MaterialTheme.typography.bodySmall,
                    color = MaterialTheme.colorScheme.onSurfaceVariant
                )
            }
        },
        text = {
            Column(
                modifier = Modifier.fillMaxWidth(),
                verticalArrangement = Arrangement.spacedBy(16.dp)
            ) {
                // Name field (required; gates the confirm button)
                OutlinedTextField(
                    value = name,
                    onValueChange = { name = it },
                    label = { Text("Name *") },
                    placeholder = { Text("e.g., John Doe") },
                    leadingIcon = {
                        Icon(Icons.Default.Person, contentDescription = null)
                    },
                    modifier = Modifier.fillMaxWidth(),
                    singleLine = true
                )
                // Date of birth (optional; opens the date picker dialog)
                OutlinedButton(
                    onClick = { showDatePicker = true },
                    modifier = Modifier.fillMaxWidth()
                ) {
                    Icon(Icons.Default.Cake, contentDescription = null)
                    Spacer(Modifier.width(8.dp))
                    Text(
                        if (dateOfBirth != null) {
                            "Birthday: ${formatDate(dateOfBirth!!)}"
                        } else {
                            "Add Birthday (Optional)"
                        }
                    )
                }
                // Relationship selector
                Column(verticalArrangement = Arrangement.spacedBy(8.dp)) {
                    Text(
                        "Relationship",
                        style = MaterialTheme.typography.labelMedium
                    )
                    // First row of relationship chips
                    Row(
                        modifier = Modifier.fillMaxWidth(),
                        horizontalArrangement = Arrangement.spacedBy(8.dp)
                    ) {
                        relationships.take(4).forEach { (rel, emoji) ->
                            FilterChip(
                                selected = selectedRelationship == rel,
                                onClick = { selectedRelationship = rel },
                                label = { Text("$emoji $rel") }
                            )
                        }
                    }
                    // Second row of relationship chips
                    Row(
                        modifier = Modifier.fillMaxWidth(),
                        horizontalArrangement = Arrangement.spacedBy(8.dp)
                    ) {
                        relationships.drop(4).forEach { (rel, emoji) ->
                            FilterChip(
                                selected = selectedRelationship == rel,
                                onClick = { selectedRelationship = rel },
                                label = { Text("$emoji $rel") }
                            )
                        }
                    }
                }
                // Privacy note
                Card(
                    colors = CardDefaults.cardColors(
                        containerColor = MaterialTheme.colorScheme.surfaceVariant
                    )
                ) {
                    Row(
                        modifier = Modifier.padding(12.dp),
                        horizontalArrangement = Arrangement.spacedBy(8.dp)
                    ) {
                        Icon(
                            Icons.Default.Lock,
                            contentDescription = null,
                            modifier = Modifier.size(16.dp),
                            tint = MaterialTheme.colorScheme.primary
                        )
                        Text(
                            "All data stays on your device",
                            style = MaterialTheme.typography.bodySmall,
                            color = MaterialTheme.colorScheme.onSurfaceVariant
                        )
                    }
                }
            }
        },
        confirmButton = {
            Button(
                onClick = {
                    if (name.isNotBlank()) {
                        onConfirm(name, dateOfBirth, selectedRelationship)
                    }
                },
                enabled = name.isNotBlank()
            ) {
                Text("Continue")
            }
        },
        dismissButton = {
            TextButton(onClick = onDismiss) {
                Text("Cancel")
            }
        }
    )
    // Date picker dialog
    if (showDatePicker) {
        // State is hoisted out of the dialog content so the confirm button
        // can read the user's actual selection.
        val datePickerState = rememberDatePickerState()
        DatePickerDialog(
            onDismissRequest = { showDatePicker = false },
            confirmButton = {
                TextButton(
                    onClick = {
                        // BUG FIX: store the date the user picked instead of the
                        // previous System.currentTimeMillis() placeholder. Keep
                        // the prior value if nothing was selected.
                        datePickerState.selectedDateMillis?.let { dateOfBirth = it }
                        showDatePicker = false
                    }
                ) {
                    Text("OK")
                }
            },
            dismissButton = {
                TextButton(onClick = { showDatePicker = false }) {
                    Text("Cancel")
                }
            }
        ) {
            // Material3 DatePicker bound to the hoisted state above
            DatePicker(
                state = datePickerState,
                modifier = Modifier.padding(16.dp)
            )
        }
    }
}
/**
 * Formats an epoch-millisecond timestamp as e.g. "Jan 08, 2026",
 * using the device's default locale.
 */
private fun formatDate(timestamp: Long): String =
    SimpleDateFormat("MMM dd, yyyy", Locale.getDefault()).format(Date(timestamp))

View File

@@ -0,0 +1,68 @@
package com.placeholder.sherpai2.util
/**
 * Debug feature flags for the face-recognition pipeline.
 *
 * Toggle these to enable/disable diagnostic features.
 * Set all of them to false before release builds!
 */
object DebugFlags {
    /**
     * Enable verbose face recognition logging.
     *
     * Read by [DiagnosticLogger]; when true every d/i/w/e call is
     * forwarded to Logcat, when false all diagnostic logging is a no-op.
     *
     * When true:
     * - Logs every face detection
     * - Logs similarity scores
     * - Logs matching decisions
     * - Shows why images are skipped
     *
     * Filter Logcat by: "FaceRecognition"
     */
    const val ENABLE_FACE_RECOGNITION_LOGGING = true // ← Toggle here
    /**
     * Show confidence scores in UI.
     *
     * NOTE(review): consumers of this flag are not in this file — confirm
     * which screens read it before relying on the description above.
     */
    const val SHOW_CONFIDENCE_IN_UI = true // ← Toggle here
    /**
     * Lower thresholds for better recall (more matches, some false positives).
     *
     * NOTE(review): the actual threshold values this switches between live
     * elsewhere — verify against the matching code.
     */
    const val USE_LIBERAL_THRESHOLDS = true // ← Toggle here
}
/**
 * Diagnostic logger for face recognition — forwards to Logcat under the
 * "FaceRecognition" tag, but only while
 * [DebugFlags.ENABLE_FACE_RECOGNITION_LOGGING] is true; otherwise every
 * call is a no-op.
 */
object DiagnosticLogger {
    private const val TAG = "FaceRecognition"

    // Central gate: runs [block] only when diagnostic logging is enabled.
    private inline fun whenEnabled(block: () -> Unit) {
        if (DebugFlags.ENABLE_FACE_RECOGNITION_LOGGING) block()
    }

    fun d(message: String) = whenEnabled { android.util.Log.d(TAG, message) }

    fun i(message: String) = whenEnabled { android.util.Log.i(TAG, message) }

    fun w(message: String) = whenEnabled { android.util.Log.w(TAG, message) }

    fun e(message: String, throwable: Throwable? = null) = whenEnabled {
        if (throwable != null) {
            android.util.Log.e(TAG, message, throwable)
        } else {
            android.util.Log.e(TAG, message)
        }
    }
}