diff --git a/lyra/android_example/java/com/example/android/lyra/MainActivity.java b/lyra/android_example/java/com/example/android/lyra/MainActivity.java index 5b921b09..61000523 100644 --- a/lyra/android_example/java/com/example/android/lyra/MainActivity.java +++ b/lyra/android_example/java/com/example/android/lyra/MainActivity.java @@ -136,10 +136,26 @@ private synchronized void encodeAndDecodeMicDataToSpeaker(int bitrate) { if (micDataShortsWritten == 0) { return; } - // Whatever micData holds, encode and decode with Lyra. - short[] decodedAudio = encodeAndDecodeSamples(micData, micDataShortsWritten, bitrate, + // Encode the recorded wave inside the micData with Lyra + short[] encodedAudio = encodeSamples(micData, micDataShortsWritten, SAMPLE_RATE, bitrate, weightsDirectory); + // Notes for Chase: + // The bytes in the "encodedAudio" array contain all the data you wish to transmit via our LoRa network. + // The length of the short array "encodedAudio" is typically larger than the maximum payload + // size allowed by our LoRa protocol, so make sure you manually separate this array into several + // small packets (e.g. ~100 bytes long) and transmit them via the LoRa network sequentially. + // + // Another thing to note is that the Lyra codec can tolerate packet losses up to some level. Therefore, it + // might be useful to try different sizes of the small packets to find which size(s) give you the best audio + // experience when packet loss is present. + // + // At the receiving side, when you receive a packet, you can call the "decodeSamples" function to decode a Lyra packet. + // The decoded audio should be buffered into an array first and then played back (see the example below). + // In practice, do not assume that the receiving party will receive 100% of the transmitted packets. Therefore, just + // decode what you receive - the interrupted audio may still be useful to the end users. 
+ short[] decodedAudio = decodeSamples(encodedAudio, micDataShortsWritten, SAMPLE_RATE, + bitrate, weightsDirectory); if (decodedAudio == null) { Log.e(TAG, "Failed to encode and decode microphone data."); return; @@ -289,11 +305,16 @@ private void copyWeightsAssetsToDirectory(String targetDirectory) { } /** - * A method that is implemented by the 'lyra_android_example' C++ library, which is packaged with + * Methods that are implemented by the 'lyra_android_example' C++ library, which is packaged with * this application. */ public native String lyraBenchmark(int numCondVectors, String modelBasePath); - public native short[] encodeAndDecodeSamples( - short[] samples, int sampleLength, int bitrate, String modelBasePath); + public native short[] encodeSamples( + short[] samples, int sampleLength, int sample_rate_Hz, + int bitrate, String modelBasePath); + + public native short[] decodeSamples( + short[] samples, int sampleLength, int sample_rate_Hz, + int bitrate, String modelBasePath); } diff --git a/lyra/android_example/jni_lyra_benchmark_lib.cc b/lyra/android_example/jni_lyra_benchmark_lib.cc index 0d6dde9d..5981b460 100644 --- a/lyra/android_example/jni_lyra_benchmark_lib.cc +++ b/lyra/android_example/jni_lyra_benchmark_lib.cc @@ -23,28 +23,104 @@ #include "lyra/lyra_benchmark_lib.h" #include "lyra/lyra_config.h" -extern "C" JNIEXPORT jshortArray JNICALL -Java_com_example_android_lyra_MainActivity_encodeAndDecodeSamples( - JNIEnv* env, jobject this_obj, jshortArray samples, jint sample_length, - jint bitrate, jstring model_base_path) { +extern "C" JNIEXPORT jshortArray + JNICALL + /** + * @brief Encode the audio samples with Lyra encoder + * + * @param env - JNI environment (do not change) + * @param this_obj - the Java object to interact with (do not change) + * @param samples - the input audio samples (waves) + * @param sample_length - the length of the audio samples (waves) + * @param sample_rate_Hz - the sampling rate of the audio samples (waves) + * @param 
bitrate - the bit rate to be encoded to + * @param model_base_path - the path of the trained model + */ + Java_com_example_android_lyra_MainActivity_encodeSamples( + JNIEnv* env, jobject this_obj, jshortArray samples, jint sample_length, + jint sample_rate_Hz, jint bitrate, jstring model_base_path) { std::vector<int16_t> samples_vector(sample_length); std::vector<uint8_t> features; - std::vector<int16_t> decoded_audio; - jshortArray java_decoded_audio = nullptr; + std::vector<int16_t> encoded_samples; + jshortArray java_encoded_samples = nullptr; env->GetShortArrayRegion(samples, jsize{0}, sample_length, &samples_vector[0]); + const char* cpp_model_base_path = env->GetStringUTFChars(model_base_path, 0); + + absl::BitGen gen; + if (chromemedia::codec::EncodeWav( + samples_vector, chromemedia::codec::kNumChannels, sample_rate_Hz, + bitrate, false, false, cpp_model_base_path, &features)) { + // write the encoded audio samples to a Java short array to return. NOTE(review): "&encoded_samples[0]" below reads an EMPTY vector (UB); "features" should be copied into "encoded_samples" first, and "cpp_model_base_path" is never released on either return path (leak). + java_encoded_samples = env->NewShortArray(features.size()); + env->SetShortArrayRegion(java_encoded_samples, 0, features.size(), + &encoded_samples[0]); + return java_encoded_samples; + + } else + return nullptr; + /* // Original codes + if (chromemedia::codec::EncodeWav( + samples_vector, chromemedia::codec::kNumChannels, sample_rate_Hz, + bitrate, false, false, cpp_model_base_path, &features) && + chromemedia::codec::DecodeFeatures( + features, chromemedia::codec::BitrateToPacketSize(bitrate), + false, gen, decoder.get(), + nullptr, &decoded_audio)) { + java_decoded_audio = env->NewShortArray(decoded_audio.size()); + env->SetShortArrayRegion(java_decoded_audio, 0, decoded_audio.size(), + &decoded_audio[0]); + } + env->ReleaseStringUTFChars(model_base_path, cpp_model_base_path); + + return java_decoded_audio; + */ +} + +extern "C" JNIEXPORT jshortArray + JNICALL + /** + * @brief Decode the encoded bytes back to audio samples with Lyra decoder + * + * @param env - JNI environment (do not change) + * @param this_obj - the Java object to interact with 
(do not change) + * @param features - the encoded Lyra packet data to decode + * @param feature_length - the number of shorts in "features". NOTE(review): the unused "output" parameter is absent from the Java-side "decodeSamples" declaration - JNI signature mismatch; confirm before shipping. + * @param sample_rate_Hz - the sampling rate of the decoded audio (currently unused; the decoder below is created at 16000 Hz) + * @param bitrate - the bit rate the features were encoded at + * @param model_base_path - the path of the trained model + */ + Java_com_example_android_lyra_MainActivity_decodeSamples( + JNIEnv* env, jobject this_obj, jshortArray features, jshortArray output, + jint feature_length, jint sample_rate_Hz, jint bitrate, + jstring model_base_path) { + std::vector<int16_t> feature_vector(feature_length); + std::vector<uint8_t> feature_vector_bytes; + //std::vector<uint8_t> features; + std::vector<int16_t> decoded_audio; + jshortArray java_decoded_audio = nullptr; + // convert the Java data type to its corresponding C++ data type + env->GetShortArrayRegion(features, jsize{0}, feature_length, &(feature_vector[0])); + +// convert the int16_t vector to the uint8_t - stay for now but can be optimised later +// Note: the reason for adding the following loop is that the "GetShortArrayRegion" function +// only takes int16_t as the type of its last argument but the "DecodeFeatures" +// function accepts only a uint8_t vector as the input. 
+feature_vector_bytes.reserve(feature_length); +for (int16_t num : feature_vector) { + feature_vector_bytes.push_back(static_cast<uint8_t>(num)); +} + + const char* cpp_model_base_path = env->GetStringUTFChars(model_base_path, 0); std::unique_ptr<chromemedia::codec::LyraDecoder> decoder = chromemedia::codec::LyraDecoder::Create( 16000, chromemedia::codec::kNumChannels, cpp_model_base_path); absl::BitGen gen; - if (chromemedia::codec::EncodeWav( - samples_vector, chromemedia::codec::kNumChannels, 16000, bitrate, - false, false, cpp_model_base_path, &features) && - chromemedia::codec::DecodeFeatures( - features, chromemedia::codec::BitrateToPacketSize(bitrate), + if (chromemedia::codec::DecodeFeatures( + feature_vector_bytes, chromemedia::codec::BitrateToPacketSize(bitrate), /*randomize_num_samples_requested=*/false, gen, decoder.get(), nullptr, &decoded_audio)) { java_decoded_audio = env->NewShortArray(decoded_audio.size()); diff --git a/out/liblyra_android_example.so b/out/liblyra_android_example.so new file mode 120000 index 00000000..ce684aea --- /dev/null +++ b/out/liblyra_android_example.so @@ -0,0 +1 @@ +/private/var/tmp/_bazel_xiaoyuai/607ebfb659c5d6660b0a165bb7a98ad9/execroot/__main__/bazel-out/android-armeabi-v7a-opt/bin/_nativedeps/189_54_0_0_3_b8008ea2f3428aa5916c90d6259b0fb8881dc0780e88a1c50ab4ed64c0e51ed8.so \ No newline at end of file diff --git a/out/lyra_android_example.apk b/out/lyra_android_example.apk new file mode 100755 index 00000000..03d946f0 Binary files /dev/null and b/out/lyra_android_example.apk differ diff --git a/out/lyra_android_example_deploy.jar b/out/lyra_android_example_deploy.jar new file mode 100755 index 00000000..4be78e8a Binary files /dev/null and b/out/lyra_android_example_deploy.jar differ diff --git a/out/lyra_android_example_unsigned.apk b/out/lyra_android_example_unsigned.apk new file mode 100755 index 00000000..fdede9ff Binary files /dev/null and b/out/lyra_android_example_unsigned.apk differ