fix: address remaining Copilot review comments

- AndroidCodec2Encoder: VAD returns null instead of ByteArray(0) to prevent
  empty packet transmission; fix stale KDoc ref (Codec2Jni -> Codec2JNI)
- AndroidAudioPlayer: fix mojibake encoding in comments
- VoiceBurstViewModel: ENCODING_FAILED -> RECORDING_FAILED in recorder error
  path; fix 'broadcasted' log wording to 'sent to'; fix mojibake in KDoc
- VoiceBurstButton: remove reference to non-existent isVisible property
- FeatureVoiceBurstAndroidModule: remove dead ref to FeatureAchievementsAndroidModule
- Codec2Jni: replace duplicate JNI class with typealias to Codec2JNI
This commit is contained in:
Chris7X 2026-04-06 00:49:16 +02:00
parent b885f728a2
commit da94d6985d
6 changed files with 47 additions and 78 deletions

View file

@ -17,38 +17,8 @@
package org.meshtastic.codec2
/**
* JNI wrapper for the Codec2 library.
* This class is the interface between Kotlin/JVM and the C codec logic.
* Backwards-compatible alias to the canonical Codec2 JNI wrapper used by
* the voiceburst feature. The actual implementation lives in
* [com.geeksville.mesh.voiceburst.Codec2JNI].
*/
class Codec2Jni {
/**
* Encodes 16-bit mono PCM audio (8kHz) into Codec2 compressed frames.
* @param pcm Input audio data (ShortArray)
* @return Compressed byte array or null on error
*/
external fun encode(pcm: ShortArray): ByteArray?
/**
* Decodes Codec2 compressed frames back into 16-bit mono PCM audio (8kHz).
* @param compressed Compressed audio data (ByteArray)
* @return Decoded ShortArray or null on error
*/
external fun decode(compressed: ByteArray): ShortArray?
/**
* Gets the current Codec2 mode (e.g., 3200, 2400, etc.).
*/
external fun getMode(): Int
companion object {
init {
try {
System.loadLibrary("codec2_jni")
} catch (e: UnsatisfiedLinkError) {
// Logger not available in this core-module, using println
println("Critical: Could not load codec2_jni library")
}
}
}
}
typealias Codec2Jni = com.geeksville.mesh.voiceburst.Codec2JNI

View file

@ -34,12 +34,12 @@ private const val TAG = "AndroidAudioPlayer"
/**
* Android implementation of [AudioPlayer].
*
* Fixes compared to previous versions:
* - BUG: MODE_STATIC with bufferSize < minBufferSize ââ â STATE_NO_STATIC_DATA (state=2) ââ â silence.
* Key implementation notes:
* - BUG: MODE_STATIC with bufferSize < minBufferSize -> STATE_NO_STATIC_DATA (state=2) -> silence.
* FIX: bufferSize = maxOf(minBufferSize, pcmBytes) ALWAYS, even in static mode.
* - Using MODE_STREAM: simpler and avoids the STATE_NO_STATIC_DATA issue.
* For 1 second at 8kHz (16000 bytes) MODE_STREAM is more than adequate.
* - USAGE_MEDIA ââ â main speaker (not earpiece).
* - USAGE_MEDIA -> main speaker (not earpiece).
* - [playingFilePath] StateFlow to sync play/stop icons in the UI.
*/
class AndroidAudioPlayer(
@ -63,7 +63,7 @@ class AndroidAudioPlayer(
}
if (pcmData.isEmpty()) {
Logger.w(tag = TAG) { "PCM data is empty — skipping playback" }
Logger.w(tag = TAG) { "PCM data is empty -- skipping playback" }
onComplete()
return
}
@ -80,7 +80,7 @@ class AndroidAudioPlayer(
}
// CRITICAL: bufferSize must always be >= minBufferSize.
// With MODE_STATIC, if bufferSize < minBufferSize → state=STATE_NO_STATIC_DATA=2 → silence.
// With MODE_STATIC, if bufferSize < minBufferSize -> state=STATE_NO_STATIC_DATA=2 -> silence.
// MODE_STREAM is used for simplicity and robustness.
val pcmBytes = pcmData.size * Short.SIZE_BYTES
val bufferSize = maxOf(minBufferSize, pcmBytes)

View file

@ -29,18 +29,18 @@ private const val TAG = "AndroidCodec2Encoder"
/**
* Android implementation of [Codec2Encoder].
*
* When [Codec2Jni.isAvailable] = true, uses libcodec2 via JNI (real voice audio).
* When [Codec2JNI.isAvailable] is true, uses libcodec2 via JNI (real voice audio).
* Otherwise falls back to STUB mode (440Hz sine wave) for development/CI/builds without .so.
*
* Codec2 700B parameters:
* - Sample rate input: 8000 Hz
* - Frame: 40ms = 320 samples
* - Bytes per frame: 4
* - 1 second: 25 frames à 4 bytes = 100 bytes
* - 1 second: 25 frames x 4 bytes = 100 bytes
*
* Preprocessing applied before encoding (JNI mode only):
* 1. Amplitude normalization (brings to 70% of Short.MAX_VALUE)
* 2. Simple VAD: if RMS < threshold, returns silence without encoding
* 2. Simple VAD: if RMS < threshold, returns null without encoding
*
* JNI Lifecycle:
* The Codec2 handle is created in the constructor and destroyed in [close()].
@ -63,14 +63,14 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
" bytesPerFrame=${Codec2JNI.getBytesPerFrame(Codec2JNI.MODE_700C)}"
}
} else {
Logger.e(tag = TAG) { "Codec2JNI.create() returned 0 — falling back to stub mode" }
Logger.e(tag = TAG) { "Codec2JNI.create() returned 0 -- falling back to stub mode" }
codec2Handle = 0L
isStub = true
}
} else {
codec2Handle = 0L
isStub = true
Logger.w(tag = TAG) { "Codec2 JNI not available — stub mode (440Hz sine wave)" }
Logger.w(tag = TAG) { "Codec2 JNI not available -- stub mode (440Hz sine wave)" }
}
}
@ -81,17 +81,17 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
}
}
// ─── encode ───────────────────────────────────────────────────────────────
// --- encode -------------------------------------------------------------
/**
* Encodes 16-bit mono 8000Hz PCM into Codec2 700B bytes.
*
* Accepts an array of any length â it is split into frames
* Accepts an array of any length -- it is split into frames
* of [SAMPLES_PER_FRAME] samples. The last incomplete frame is
* padded with zeros (zero-padding).
*
* @param pcmData PCM samples from the microphone (8000 Hz, mono, signed 16-bit)
* @return ByteArray with Codec2 bytes, null if input is empty
* @return ByteArray with Codec2 bytes, null if input is empty or silence detected
*/
override fun encode(pcmData: ShortArray): ByteArray? {
if (pcmData.isEmpty()) return null
@ -110,11 +110,11 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
// Preprocessing: normalization
val normalized = normalize(pcmData)
// VAD: do not send silence
// VAD: do not send silence -- return null so the ViewModel skips transmission
val rms = computeRms(normalized)
if (rms < SILENCE_RMS_THRESHOLD) {
Logger.d(tag = TAG) { "VAD: silence detected (RMS=$rms) — skipping encode" }
return ByteArray(0)
Logger.d(tag = TAG) { "VAD: silence detected (RMS=$rms) -- skipping encode" }
return null
}
// Calculate needed frames (round up)
@ -132,7 +132,6 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
} else {
ShortArray(samplesPerFrame).also {
normalized.copyInto(it, 0, inStart, inEnd)
// remaining already 0 by default
}
}
@ -147,13 +146,13 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
}
Logger.d(tag = TAG) {
"Encode JNI: ${pcmData.size} samples → ${output.size} bytes " +
"($frameCount frames × $bytesPerFrame bytes)"
"Encode JNI: ${pcmData.size} samples -> ${output.size} bytes " +
"($frameCount frames x $bytesPerFrame bytes)"
}
return output
}
// ─── decode ───────────────────────────────────────────────────────────────
// --- decode -------------------------------------------------------------
/**
* Decodes Codec2 700B bytes into 16-bit mono 8000Hz PCM samples.
@ -178,7 +177,7 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
if (codec2Data.size % bytesPerFrame != 0) {
Logger.w(tag = TAG) {
"Decode: input size (${codec2Data.size}) not a multiple of " +
"bytesPerFrame ($bytesPerFrame) — truncating to complete frame"
"bytesPerFrame ($bytesPerFrame) -- truncating to complete frame"
}
}
@ -203,16 +202,16 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
}
Logger.d(tag = TAG) {
"Decode JNI: ${codec2Data.size} bytes → ${output.size} samples " +
"($frameCount frames × $samplesPerFrame samples)"
"Decode JNI: ${codec2Data.size} bytes -> ${output.size} samples " +
"($frameCount frames x $samplesPerFrame samples)"
}
return output
}
// ─── Preprocessing helpers ────────────────────────────────────────────────
// --- Preprocessing helpers ----------------------------------------------
/**
* Normalizes the signal amplitude to [TARGET_AMPLITUDE] Ã Short.MAX_VALUE.
* Normalizes the signal amplitude to [TARGET_AMPLITUDE] x Short.MAX_VALUE.
* Prevents clipping and improves Codec2 quality on low-volume voices.
*/
private fun normalize(pcm: ShortArray): ShortArray {
@ -220,7 +219,6 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
if (maxAmp < 1f) return pcm // absolute silence
val gain = (TARGET_AMPLITUDE * Short.MAX_VALUE) / maxAmp
// Limit maximum gain to 10x to avoid excessive noise amplification
val clampedGain = minOf(gain, MAX_GAIN)
return ShortArray(pcm.size) { i ->
@ -238,12 +236,12 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
return sqrt(sumSquares / pcm.size)
}
// ─── Stub (fallback when JNI is not available) ─────────────────────────
// --- Stub (fallback when JNI is not available) --------------------------
private fun encodeStub(pcmData: ShortArray): ByteArray {
val frameCount = (pcmData.size + SAMPLES_PER_FRAME - 1) / SAMPLES_PER_FRAME
Logger.w(tag = TAG) {
"Codec2 STUB encode: ${pcmData.size} samples → ${frameCount * BYTES_PER_FRAME} bytes (zeros)"
"Codec2 STUB encode: ${pcmData.size} samples -> ${frameCount * BYTES_PER_FRAME} bytes (zeros)"
}
return ByteArray(frameCount * BYTES_PER_FRAME) { 0x00 }
}
@ -253,10 +251,10 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
val totalSamples = frameCount * SAMPLES_PER_FRAME
Logger.w(tag = TAG) {
"Codec2 STUB decode: ${codec2Data.size} bytes → $totalSamples samples (440Hz sine wave)"
"Codec2 STUB decode: ${codec2Data.size} bytes -> $totalSamples samples (440Hz sine wave)"
}
// Generate 440Hz sine wave (A4) — audible and recognizable
// Generate 440Hz sine wave (A4) -- audible and recognizable
val sampleRate = 8000.0
val frequency = 440.0
val amplitude = Short.MAX_VALUE * 0.3 // 30% volume
@ -277,12 +275,12 @@ class AndroidCodec2Encoder : Codec2Encoder, AutoCloseable {
/** Target amplitude for normalization (70% of Short.MAX_VALUE). */
private const val TARGET_AMPLITUDE = 0.70f
/** Maximum gain applied by normalization (10×). */
/** Maximum gain applied by normalization (10x). */
private const val MAX_GAIN = 10.0f
/**
* RMS threshold below which the frame is considered silence (simple VAD).
* 200.0 on the 0-32767 scale is approximately -44 dBFS â normal voice is 2000-8000.
* 200.0 on the 0-32767 scale is approximately -44 dBFS -- normal voice is 2000-8000.
*/
private const val SILENCE_RMS_THRESHOLD = 200.0
}

View file

@ -43,7 +43,7 @@ import org.meshtastic.feature.voiceburst.repository.VoiceBurstRepository
/**
* Koin module for the Voice Burst feature module.
*
* Follows the same pattern as [FeatureAchievementsAndroidModule]:
* Follows the standard Android feature-module pattern:
* - Context and Android-only APIs remain in androidMain
* - commonMain has no direct Android dependencies
*/

View file

@ -54,8 +54,9 @@ import org.jetbrains.compose.resources.stringResource
/**
* PTT (Push-To-Talk) button for Voice Burst.
*
* Visible only if [VoiceBurstViewModel.isVisible] == true (feature flag enabled).
* Disabled during encoding/sending/rate limit.
* Render this composable only when Voice Burst is available; callers should not render
* it for [VoiceBurstState.Unsupported].
* Disabled during non-interactive processing states such as encoding and sending.
*
* Visual states:
* Idle -> Mic icon, normal color

View file

@ -44,8 +44,8 @@ private const val TAG = "VoiceBurstViewModel"
* ViewModel handling the lifecycle and orchestration of Voice Burst messaging.
*
* Full pipeline:
* MIC â [AudioRecorder] â PCM â [Codec2Encoder.encode] â bytes â [VoiceBurstRepository.sendBurst]
* RADIO â [VoiceBurstRepository.incomingBursts] â bytes â [Codec2Encoder.decode] â PCM â [AudioPlayer]
* MIC -> [AudioRecorder] -> PCM -> [Codec2Encoder.encode] -> bytes -> [VoiceBurstRepository.sendBurst]
* RADIO -> [VoiceBurstRepository.incomingBursts] -> bytes -> [Codec2Encoder.decode] -> PCM -> [AudioPlayer]
*
* Rate limiting is enforced: minimum [RATE_LIMIT_MS] between consecutive bursts.
*
@ -103,7 +103,7 @@ class VoiceBurstViewModel(
.launchIn(viewModelScope)
}
// ─── Receiver-side logic ────────────────────────────────────────────────
// --- Receiver-side logic ------------------------------------------------
private fun onBurstReceived(payload: VoiceBurstPayload) {
Logger.i(tag = TAG) {
@ -114,7 +114,7 @@ class VoiceBurstViewModel(
val pcmData = encoder.decode(payload.audioData)
if (pcmData == null || pcmData.isEmpty()) {
Logger.e(tag = TAG) { "Decoding failed — no PCM samples to play" }
Logger.e(tag = TAG) { "Decoding failed -- no PCM samples to play" }
_state.update { VoiceBurstState.Idle }
return
}
@ -128,7 +128,7 @@ class VoiceBurstViewModel(
}
}
// ─── Sender-side (PTT) recording ──────────────────────────────────────
// --- Sender-side (PTT) recording ----------------------------------------
/**
* Initiates microphone recording if the state machine is [Idle].
@ -183,7 +183,7 @@ class VoiceBurstViewModel(
uiTimerJob?.cancel()
uiTimerJob = null
Logger.e(tag = TAG) { "Hardware recording error: ${error.message}" }
_state.update { VoiceBurstState.Error(VoiceBurstError.ENCODING_FAILED) }
_state.update { VoiceBurstState.Error(VoiceBurstError.RECORDING_FAILED) }
},
maxDurationMs = MAX_DURATION_MS,
)
@ -201,7 +201,7 @@ class VoiceBurstViewModel(
audioRecorder.stopRecording()
}
// ─── Encoding and Dispatch ──────────────────────────────────────────────
// --- Encoding and Dispatch ----------------------------------------------
internal fun onRecordingComplete(pcmData: ShortArray, durationMs: Int) {
_state.update { VoiceBurstState.Encoding }
@ -215,9 +215,9 @@ class VoiceBurstViewModel(
}
if (encoder.isStub) {
Logger.w(tag = TAG) { "Running with Codec2 stub — transmission will not be intelligible" }
Logger.w(tag = TAG) { "Running with Codec2 stub -- transmission will not be intelligible" }
} else {
Logger.i(tag = TAG) { "Enc JNI Success: ${pcmData.size} samples → ${audioBytes.size} bytes" }
Logger.i(tag = TAG) { "Enc JNI Success: ${pcmData.size} samples -> ${audioBytes.size} bytes" }
}
val payload = VoiceBurstPayload(