chore: KMP audit — commonize code, centralize utilities, eliminate dead abstractions (#5133)

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit was authored by James Rich on 2026-04-14 at 21:17:50 -05:00 and committed via GitHub.
parent 50ade01e55
commit 72b981f73b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
132 changed files with 2186 additions and 916 deletions

View file

@ -17,6 +17,7 @@
package org.meshtastic.core.database
import okio.ByteString.Companion.encodeUtf8
import org.meshtastic.core.common.util.normalizeAddress
object DatabaseConstants {
const val DB_PREFIX: String = "meshtastic_database"
@ -40,17 +41,6 @@ object DatabaseConstants {
const val ADDRESS_ANON_EDGE_LEN: Int = 2
}
/**
 * Normalizes a device address into its canonical database-key form.
 *
 * Trims and upper-cases the input; blank/null input and the literals "N"/"NULL"
 * (case-insensitive) map to the sentinel "DEFAULT", otherwise ':' separators are stripped.
 */
fun normalizeAddress(addr: String?): String {
    val upper = addr?.trim()?.uppercase()
    if (upper.isNullOrBlank() || upper == "N" || upper == "NULL") return "DEFAULT"
    return upper.replace(":", "")
}
fun shortSha1(s: String): String = s.encodeUtf8().sha1().hex().take(DatabaseConstants.DB_NAME_HASH_LEN)
fun buildDbName(address: String?): String = if (address.isNullOrBlank()) {

View file

@ -241,6 +241,7 @@ open class DatabaseManager(
victims.forEach { name ->
runCatching {
// runCatching intentional: best-effort cleanup must not abort on cancellation
closeCachedDatabase(name)
deleteDatabase(name)
datastore.edit { it.remove(lastUsedKey(name)) }
@ -266,6 +267,7 @@ open class DatabaseManager(
if (fs.exists(legacyPath)) {
runCatching {
// runCatching intentional: best-effort cleanup must not abort on cancellation
closeCachedDatabase(legacy)
deleteDatabase(legacy)
}

View file

@ -94,8 +94,9 @@ import org.meshtastic.core.database.entity.TracerouteNodePositionEntity
AutoMigration(from = 34, to = 35, spec = AutoMigration34to35::class),
AutoMigration(from = 35, to = 36),
AutoMigration(from = 36, to = 37),
AutoMigration(from = 37, to = 38),
],
version = 37,
version = 38,
exportSchema = true,
)
@androidx.room3.ConstructedBy(MeshtasticDatabaseConstructor::class)

View file

@ -25,10 +25,10 @@ import org.meshtastic.core.database.entity.MeshLog
@Dao
interface MeshLogDao {
/**
 * Streams up to [maxItem] log rows, newest first.
 *
 * Fix: the method carried two conflicting @Query annotations (a diff artifact); @Query is not
 * repeatable, so only the standard `LIMIT :maxItem` form is kept (the `LIMIT 0,:maxItem`
 * offset form is a MySQL-ism SQLite merely tolerates).
 */
@Query("SELECT * FROM log ORDER BY received_date DESC LIMIT :maxItem")
fun getAllLogs(maxItem: Int): Flow<List<MeshLog>>
/**
 * Streams up to [maxItem] log rows in receive (oldest-first) order.
 *
 * Fix: removed the duplicate conflicting @Query annotation (diff artifact); @Query is not
 * repeatable, and the standard `LIMIT :maxItem` form is kept.
 */
@Query("SELECT * FROM log ORDER BY received_date ASC LIMIT :maxItem")
fun getAllLogsInReceiveOrder(maxItem: Int): Flow<List<MeshLog>>
/**
@ -40,7 +40,7 @@ interface MeshLogDao {
"""
SELECT * FROM log
WHERE from_num = :fromNum AND (:portNum = -1 OR port_num = :portNum)
ORDER BY received_date DESC LIMIT 0,:maxItem
ORDER BY received_date DESC LIMIT :maxItem
""",
)
fun getLogsFrom(fromNum: Int, portNum: Int, maxItem: Int): Flow<List<MeshLog>>

View file

@ -35,6 +35,9 @@ interface NodeInfoDao {
companion object {
    // Expected public-key length in bytes. NOTE(review): presumably a Curve25519 key
    // (32 bytes) — confirm against the key-generation code.
    const val KEY_SIZE = 32

    /** SQLite has a limit of ~999 bind parameters per query. */
    const val MAX_BIND_PARAMS = 999
}
/**
@ -281,9 +284,15 @@ interface NodeInfoDao {
@Transaction
suspend fun getNodeByNum(num: Int): NodeWithRelations?
/** Fetches node rows for the given node numbers; callers chunk inputs to [MAX_BIND_PARAMS]. */
@Query("SELECT * FROM nodes WHERE num IN (:nodeNums)")
suspend fun getNodeEntitiesByNums(nodeNums: List<Int>): List<NodeEntity>

/** Returns one node holding exactly [publicKey], or null if no node has claimed it. */
@Query("SELECT * FROM nodes WHERE public_key = :publicKey LIMIT 1")
suspend fun findNodeByPublicKey(publicKey: ByteString?): NodeEntity?

/** Batch lookup of nodes by public key; callers chunk inputs to [MAX_BIND_PARAMS]. */
@Query("SELECT * FROM nodes WHERE public_key IN (:publicKeys)")
suspend fun findNodesByPublicKeys(publicKeys: List<ByteString>): List<NodeEntity>

/** Inserts [node], or updates the existing row that shares its primary key. */
@Upsert suspend fun doUpsert(node: NodeEntity)
@Transaction
@ -297,11 +306,77 @@ interface NodeInfoDao {
@Query("UPDATE nodes SET notes = :notes WHERE num = :num")
suspend fun setNodeNotes(num: Int, notes: String)
/**
 * Batch version of [getVerifiedNodeForUpsert]. Pre-fetches all existing nodes and public-key conflicts in two
 * queries instead of N individual queries, then processes each node in memory.
 *
 * NOTE(review): the returned list orders all updated existing nodes before all new inserts, so
 * it can differ from the order of [incomingNodes] — confirm callers do not rely on ordering.
 */
@Suppress("NestedBlockDepth")
private suspend fun getVerifiedNodesForUpsert(incomingNodes: List<NodeEntity>): List<NodeEntity> {
    // Prepare all incoming nodes (populate denormalized fields)
    incomingNodes.forEach { node ->
        node.publicKey = node.user.public_key
        if (node.user.hw_model != HardwareModel.UNSET) {
            node.longName = node.user.long_name
            node.shortName = node.user.short_name
        } else {
            // UNSET hardware model: treat the user record as unpopulated and clear display names.
            node.longName = null
            node.shortName = null
        }
    }

    // Batch fetch all existing nodes by num (chunked for SQLite bind-param limit)
    val existingNodesMap =
        incomingNodes
            .map { it.num }
            .chunked(MAX_BIND_PARAMS)
            .flatMap { getNodeEntitiesByNums(it) }
            .associateBy { it.num }

    // Partition into updates vs. inserts and resolve existing nodes in-memory
    val result = mutableListOf<NodeEntity>()
    val newNodes = mutableListOf<NodeEntity>()
    for (incoming in incomingNodes) {
        val existing = existingNodesMap[incoming.num]
        if (existing != null) {
            // Known node number: run the single-node validation/merge against the stored row.
            result.add(handleExistingNodeUpsertValidation(existing, incoming))
        } else {
            newNodes.add(incoming)
        }
    }

    // Batch validate new nodes' public keys (one query instead of N)
    val publicKeysToCheck = newNodes.mapNotNull { node -> node.publicKey?.takeIf { it.size > 0 } }.distinct()
    val pkConflicts =
        if (publicKeysToCheck.isNotEmpty()) {
            publicKeysToCheck
                .chunked(MAX_BIND_PARAMS)
                .flatMap { findNodesByPublicKeys(it) }
                .associateBy { it.publicKey }
        } else {
            emptyMap()
        }
    for (newNode in newNodes) {
        if ((newNode.publicKey?.size ?: 0) > 0) {
            val conflicting = pkConflicts[newNode.publicKey]
            if (conflicting != null && conflicting.num != newNode.num) {
                // A different stored node already owns this public key: keep the stored node
                // instead of inserting the incoming one. TODO(review): confirm this matches
                // [getVerifiedNodeForUpsert]'s single-node conflict handling.
                result.add(conflicting)
            } else {
                result.add(newNode)
            }
        } else {
            // Empty/absent public key: nothing to validate against, accept as-is.
            result.add(newNode)
        }
    }
    return result
}
/**
 * Replaces the locally-installed node config: clears the previous MyNodeEntity, stores [mi],
 * then batch-validates and upserts [nodes].
 *
 * Fix: the body contained both the stale per-node path
 * (`putAll(nodes.map { getVerifiedNodeForUpsert(it) })`) and the batch path, writing every node
 * twice (a diff artifact); only the two-query batch path is kept.
 *
 * Runs in a single transaction so readers never observe a partially-installed config.
 */
@Transaction
suspend fun installConfig(mi: MyNodeEntity, nodes: List<NodeEntity>) {
    clearMyNodeInfo()
    setMyNodeInfo(mi)
    putAll(getVerifiedNodesForUpsert(nodes))
}
/**

View file

@ -18,7 +18,9 @@ package org.meshtastic.core.database.dao
import androidx.paging.PagingSource
import androidx.room3.Dao
import androidx.room3.Insert
import androidx.room3.MapColumn
import androidx.room3.OnConflictStrategy
import androidx.room3.Query
import androidx.room3.Transaction
import androidx.room3.Update
@ -326,8 +328,15 @@ interface PacketDao {
)
suspend fun findPacketBySfppHash(hash: ByteString): Packet?
/**
 * Returns queued outbound packets for the current node, oldest first.
 *
 * Fix: `getQueuedPackets` was declared twice (the old `@Transaction` default-body version that
 * loaded every packet and filtered in memory, plus this SQL version — a diff artifact); the
 * redeclaration is removed and filtering stays in SQL via `json_extract` on the serialized
 * DataPacket JSON, which also drops the needless nullable return.
 */
@Query(
    """
    SELECT data FROM packet
    WHERE (myNodeNum = 0 OR myNodeNum = (SELECT myNodeNum FROM my_node))
    AND json_extract(data, '${"$"}.status') = 'QUEUED'
    ORDER BY received_time ASC
    """,
)
suspend fun getQueuedPackets(): List<DataPacket>
@Query(
"""
@ -359,23 +368,24 @@ interface PacketDao {
/** Inserts or replaces the given contact-settings rows. */
@Upsert suspend fun upsertContactSettings(contacts: List<ContactSettings>)

/** Inserts rows only for contact keys that do not already exist; existing rows are untouched. */
@Insert(onConflict = OnConflictStrategy.IGNORE)
suspend fun insertContactSettingsIgnore(contacts: List<ContactSettings>)

/** Sets only the muteUntil column for the given contact keys, leaving other columns intact. */
@Query("UPDATE contact_settings SET muteUntil = :muteUntil WHERE contact_key IN (:contactKeys)")
suspend fun updateMuteUntil(contactKeys: List<String>, muteUntil: Long)
/**
 * Mutes or unmutes the given contact keys.
 *
 * [until] semantics: Long.MAX_VALUE mutes forever, 0 unmutes, any other value is a relative
 * duration in millis added to the current time.
 *
 * Fix: the body contained two complete implementations concatenated (the old read-modify-write
 * upsert loop plus the new atomic column update — a diff artifact), so each call wrote every
 * row twice through different paths; only the race-free atomic implementation is kept.
 */
@Transaction
suspend fun setMuteUntil(contacts: List<String>, until: Long) {
    val absoluteMuteUntil =
        when (until) {
            Long.MAX_VALUE -> Long.MAX_VALUE // mute forever
            0L -> 0L // unmute
            else -> nowMillis + until
        }
    // Ensure rows exist for all contacts (IGNORE avoids overwriting existing data)
    insertContactSettingsIgnore(contacts.map { ContactSettings(contact_key = it) })
    // Atomic column-level update — no read-then-write race
    updateMuteUntil(contacts, absoluteMuteUntil)
}
@Upsert suspend fun insert(reaction: ReactionEntity)
@ -479,9 +489,10 @@ interface PacketDao {
val indexMap =
oldSettings
.mapIndexed { oldIndex, oldChannel ->
val pskMatches = newSettings.mapIndexedNotNull { index, channel ->
if (channel.psk == oldChannel.psk) index to channel else null
}
val pskMatches =
newSettings.mapIndexedNotNull { index, channel ->
if (channel.psk == oldChannel.psk) index to channel else null
}
val newIndex =
when {

View file

@ -118,6 +118,7 @@ data class MetadataEntity(
Index(value = ["hops_away"]),
Index(value = ["is_favorite"]),
Index(value = ["last_heard", "is_favorite"]),
Index(value = ["public_key"]),
],
)
data class NodeEntity(

View file

@ -74,6 +74,9 @@ data class PacketEntity(
Index(value = ["contact_key"]),
Index(value = ["contact_key", "port_num", "received_time"]),
Index(value = ["packet_id"]),
Index(value = ["received_time"]),
Index(value = ["filtered"]),
Index(value = ["read"]),
],
)
data class Packet(
@ -98,9 +101,12 @@ data class Packet(
fun getRelayNode(relayNodeId: Int, nodes: List<Node>, ourNodeNum: Int?): Node? {
val relayNodeIdSuffix = relayNodeId and RELAY_NODE_SUFFIX_MASK
val candidateRelayNodes = nodes.filter {
it.num != ourNodeNum && it.lastHeard != 0 && (it.num and RELAY_NODE_SUFFIX_MASK) == relayNodeIdSuffix
}
val candidateRelayNodes =
nodes.filter {
it.num != ourNodeNum &&
it.lastHeard != 0 &&
(it.num and RELAY_NODE_SUFFIX_MASK) == relayNodeIdSuffix
}
val closestRelayNode =
if (candidateRelayNodes.size == 1) {