Initial Commit

main
Matthew 2 months ago
parent 35860d53a5
commit cff42bc5c3

app/.gitignore vendored

@@ -0,0 +1 @@
/build

@@ -0,0 +1,72 @@
plugins {
id 'com.android.application'
}
android {
namespace 'com.xypower.dblstreams'
compileSdk 33
defaultConfig {
applicationId "com.xypower.dblstreams"
minSdk 24
targetSdk 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
// cppFlags '-std=c++17 -frtti -fexceptions -Wno-error=format-security'
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security -fopenmp'
// cppFlags '-std=c++17 -Wno-error=format-security'
// arguments "-DANDROID_STL=c++_shared"
arguments "-DHDRPLUS_ROOT=" + hdrplusroot
abiFilters 'arm64-v8a'
// setAbiFilters(['arm64-v8a'])
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
debuggable true
jniDebuggable true
}
}
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
externalNativeBuild {
cmake {
path file('src/main/cpp/CMakeLists.txt')
version '3.22.1'
}
}
buildFeatures {
viewBinding true
}
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.6.1'
implementation 'com.google.android.material:material:1.8.0'
implementation 'androidx.constraintlayout:constraintlayout:2.1.4'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.5'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.5.1'
// https://mvnrepository.com/artifact/com.arthenica/ffmpeg-kit-full
// implementation files('libs/ffmpeg-kit-full-6.0-2.LTS.aar')
}

@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

@@ -0,0 +1,26 @@
package com.xypower.dblstreams;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.xypower.dblstreams", appContext.getPackageName());
}
}

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:requestLegacyExternalStorage="true"
android:theme="@style/Theme.DblStreams"
tools:targetApi="30">
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

@@ -0,0 +1,73 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html.
# For more examples on how to use CMake, see https://github.com/android/ndk-samples.
# Sets the minimum CMake version required for this project.
cmake_minimum_required(VERSION 3.22.1)
# Declares the project name. The project name can be accessed via ${ PROJECT_NAME},
# Since this is the top level CMakeLists.txt, the project name is also accessible
# with ${CMAKE_PROJECT_NAME} (both CMake variables are in-sync within the top level
# build script scope).
project("dblstreams")
add_definitions(-DUSING_FFMPEG)
# Find required packages
# find_package(camera2ndk REQUIRED)
# find_package(mediandk REQUIRED)
# Find FFmpeg
#find_path(FFMPEG_INCLUDE_DIR libavformat/avformat.h)
#find_library(AVCODEC_LIBRARY avcodec)
#find_library(AVFORMAT_LIBRARY avformat)
#find_library(AVUTIL_LIBRARY avutil)
# OpenMP
find_package(OpenMP REQUIRED)
include_directories(D:/Workspace/deps/hdrplus_libs/${ANDROID_ABI}/include)
link_directories(D:/Workspace/deps/hdrplus_libs/${ANDROID_ABI}/lib)
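# NOTE: the absolute D:/Workspace paths above are machine-specific. A more portable
# sketch, assuming HDRPLUS_ROOT is passed in from Gradle (as build.gradle does via
# -DHDRPLUS_ROOT), would be:
#   include_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/include)
#   link_directories(${HDRPLUS_ROOT}/${ANDROID_ABI}/lib)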
include_directories(
${CMAKE_SOURCE_DIR}/include
${FFMPEG_INCLUDE_DIR}
)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
#
# In this top level CMakeLists.txt, ${CMAKE_PROJECT_NAME} is used to define
# the target library name; in the sub-module's CMakeLists.txt, ${PROJECT_NAME}
# is preferred for the same purpose.
#
# In order to load a library into your app from Java/Kotlin, you must call
# System.loadLibrary() and pass the name of the library defined here;
# for GameActivity/NativeActivity derived applications, the same library name must be
# used in the AndroidManifest.xml file.
add_library(${CMAKE_PROJECT_NAME} SHARED
# List C/C++ source files with relative paths to this CMakeLists.txt.
native-lib.cpp
camera_manager.cpp
encoder_manager.cpp
rtsp_streamer.cpp
Utils.cpp
)
# Specifies libraries CMake should link to your target library. You
# can link libraries from various origins, such as libraries defined in this
# build script, prebuilt third-party libraries, or Android system libraries.
target_link_libraries(${CMAKE_PROJECT_NAME} PUBLIC -fopenmp -static-openmp
# List libraries link to the target library
android
log
camera2ndk
mediandk
z m
-pthread
avcodec avfilter avformat avutil swresample swscale x264 postproc
OpenMP::OpenMP_CXX
)

@@ -0,0 +1,42 @@
#include "utils.h"
#include <chrono>
#include <cstring>
namespace utils {
void YUV420ToNV21(const uint8_t* yuv420, uint8_t* nv21, int width, int height) {
int frameSize = width * height;
int uSize = frameSize / 4;
int vSize = frameSize / 4;
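// Layout assumption: input is planar I420 (YYYY... UUUU... VVVV...),
// output is semi-planar NV21 (YYYY... VUVUVU...). For a 4x2 image:
//   I420: Y0..Y7 U0 U1 V0 V1  ->  NV21: Y0..Y7 V0 U0 V1 U1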
// Y
memcpy(nv21, yuv420, frameSize);
// VU (interleaved)
for (int i = 0; i < uSize; i++) {
nv21[frameSize + i * 2] = yuv420[frameSize + uSize + i]; // V
nv21[frameSize + i * 2 + 1] = yuv420[frameSize + i]; // U
}
}
void YUV420ToNV12(const uint8_t* yuv420, uint8_t* nv12, int width, int height) {
int frameSize = width * height;
int uSize = frameSize / 4;
int vSize = frameSize / 4;
// Y
memcpy(nv12, yuv420, frameSize);
// UV (interleaved)
for (int i = 0; i < uSize; i++) {
nv12[frameSize + i * 2] = yuv420[frameSize + i]; // U
nv12[frameSize + i * 2 + 1] = yuv420[frameSize + uSize + i]; // V
}
}
int64_t getCurrentTimeMicro() {
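// Note: high_resolution_clock has an unspecified epoch, so these values are only
// meaningful as deltas (e.g. presentation-time offsets), never as wall-clock time.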
return std::chrono::duration_cast<std::chrono::microseconds>(
std::chrono::high_resolution_clock::now().time_since_epoch()).count();
}
} // namespace utils

@@ -0,0 +1,21 @@
#ifndef UTILS_H
#define UTILS_H
#include <cstdint>
#include <cstddef>
// YUV conversion utilities
namespace utils {
// Convert YUV420 to NV21
void YUV420ToNV21(const uint8_t* yuv420, uint8_t* nv21, int width, int height);
// Convert YUV420 to NV12
void YUV420ToNV12(const uint8_t* yuv420, uint8_t* nv12, int width, int height);
// Get current timestamp in microseconds
int64_t getCurrentTimeMicro();
} // namespace utils
#endif // UTILS_H

@@ -0,0 +1,243 @@
#include "camera_manager.h"
#include <android/log.h>
#include <chrono>
#include <thread>
#include <string>
#define LOG_TAG "CameraManager"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
CameraManager::CameraManager() {}
CameraManager::~CameraManager() {
stopCapture();
if (mCameraDevice) {
ACameraDevice_close(mCameraDevice);
mCameraDevice = nullptr;
}
if (mCameraManager) {
ACameraManager_delete(mCameraManager);
mCameraManager = nullptr;
}
}
bool CameraManager::initialize() {
mCameraManager = ACameraManager_create();
if (!mCameraManager) {
LOGE("Failed to create camera manager");
return false;
}
return true;
}
std::vector<std::string> CameraManager::getAvailableCameras() {
std::vector<std::string> cameraIds;
ACameraIdList* cameraIdList = nullptr;
ACameraManager_getCameraIdList(mCameraManager, &cameraIdList);
if (cameraIdList) {
for (int i = 0; i < cameraIdList->numCameras; i++) {
cameraIds.push_back(cameraIdList->cameraIds[i]);
}
ACameraManager_deleteCameraIdList(cameraIdList);
}
return cameraIds;
}
bool CameraManager::openCamera(const char* cameraId) {
ACameraDevice_stateCallbacks deviceStateCallbacks = {
.context = this,
.onDisconnected = onDeviceDisconnected,
.onError = onDeviceError
};
camera_status_t status = ACameraManager_openCamera(mCameraManager, cameraId, &deviceStateCallbacks, &mCameraDevice);
if (status != ACAMERA_OK || !mCameraDevice) {
LOGE("Failed to open camera: %d", status);
return false;
}
return true;
}
bool CameraManager::startCapture(int width, int height, FrameCallback callback) {
mFrameCallback = callback;
// Create ImageReader
media_status_t mediaStatus = AImageReader_new(
width, height, AIMAGE_FORMAT_YUV_420_888, 2, &mImageReader);
if (mediaStatus != AMEDIA_OK || !mImageReader) {
LOGE("Failed to create image reader: %d", mediaStatus);
return false;
}
// Set image reader callback
AImageReader_ImageListener listener = {
.context = this,
.onImageAvailable = imageCallback
};
AImageReader_setImageListener(mImageReader, &listener);
// Create output target
ANativeWindow* nativeWindow;
AImageReader_getWindow(mImageReader, &nativeWindow);
ACameraOutputTarget_create(nativeWindow, &mOutputTarget);
// Create capture request
ACameraDevice_createCaptureRequest(mCameraDevice, TEMPLATE_RECORD, &mCaptureRequest);
ACaptureRequest_addTarget(mCaptureRequest, mOutputTarget);
// Configure session
// NOTE: sessionOutput and outputContainer are never freed; a fuller
// implementation would keep them as members and release them in stopCapture().
ACaptureSessionOutput* sessionOutput;
ACaptureSessionOutputContainer* outputContainer;
ACaptureSessionOutput_create(nativeWindow, &sessionOutput);
ACaptureSessionOutputContainer_create(&outputContainer);
ACaptureSessionOutputContainer_add(outputContainer, sessionOutput);
ACameraCaptureSession_stateCallbacks sessionStateCallbacks = {
.context = this,
.onClosed = onSessionClosed,
.onReady = onSessionReady,
.onActive = onSessionActive
};
camera_status_t status = ACameraDevice_createCaptureSession(
mCameraDevice, outputContainer, &sessionStateCallbacks, &mCaptureSession);
if (status != ACAMERA_OK) {
LOGE("Failed to create capture session: %d", status);
return false;
}
// Start repeating request
status = ACameraCaptureSession_setRepeatingRequest(
mCaptureSession, nullptr, 1, &mCaptureRequest, nullptr);
if (status != ACAMERA_OK) {
LOGE("Failed to start repeating request: %d", status);
return false;
}
mRunning = true;
return true;
}
void CameraManager::stopCapture() {
std::unique_lock<std::mutex> lock(mMutex);
if (mRunning) {
mRunning = false;
lock.unlock();
mCondVar.notify_all();
if (mCaptureSession) {
ACameraCaptureSession_stopRepeating(mCaptureSession);
ACameraCaptureSession_close(mCaptureSession);
mCaptureSession = nullptr;
}
if (mCaptureRequest) {
ACaptureRequest_free(mCaptureRequest);
mCaptureRequest = nullptr;
}
if (mOutputTarget) {
ACameraOutputTarget_free(mOutputTarget);
mOutputTarget = nullptr;
}
if (mImageReader) {
AImageReader_delete(mImageReader);
mImageReader = nullptr;
}
}
}
// Static callbacks
void CameraManager::onDeviceDisconnected(void* context, ACameraDevice* device) {
auto* manager = static_cast<CameraManager*>(context);
LOGI("Camera disconnected");
manager->stopCapture();
}
void CameraManager::onDeviceError(void* context, ACameraDevice* device, int error) {
auto* manager = static_cast<CameraManager*>(context);
LOGE("Camera error: %d", error);
manager->stopCapture();
}
void CameraManager::onSessionClosed(void* context, ACameraCaptureSession* session) {
LOGI("Camera session closed");
}
void CameraManager::onSessionReady(void* context, ACameraCaptureSession* session) {
LOGI("Camera session ready");
}
void CameraManager::onSessionActive(void* context, ACameraCaptureSession* session) {
LOGI("Camera session active");
}
void CameraManager::imageCallback(void* context, AImageReader* reader) {
auto* manager = static_cast<CameraManager*>(context);
AImage* image = nullptr;
media_status_t status = AImageReader_acquireLatestImage(reader, &image);
if (status != AMEDIA_OK || !image) {
return;
}
// Get image data
int32_t format;
AImage_getFormat(image, &format);
int32_t width, height;
AImage_getWidth(image, &width);
AImage_getHeight(image, &height);
int64_t timestamp;
AImage_getTimestamp(image, &timestamp);
// For YUV420 format, we need to get each plane
uint8_t* yPixel = nullptr;
uint8_t* uPixel = nullptr;
uint8_t* vPixel = nullptr;
int yLen = 0, uLen = 0, vLen = 0;
int yStride = 0, uStride = 0, vStride = 0;
AImage_getPlaneData(image, 0, &yPixel, &yLen);
AImage_getPlaneData(image, 1, &uPixel, &uLen);
AImage_getPlaneData(image, 2, &vPixel, &vLen);
AImage_getPlaneRowStride(image, 0, &yStride);
AImage_getPlaneRowStride(image, 1, &uStride);
AImage_getPlaneRowStride(image, 2, &vStride);
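// Caveat: for AIMAGE_FORMAT_YUV_420_888 the U/V planes may carry row padding
// (row stride > width) or be interleaved (pixel stride == 2, queried via
// AImage_getPlanePixelStride); the back-to-back memcpy below only produces
// valid planar I420 when the strides are tight.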
// Copy the planes back-to-back into one contiguous buffer
int totalSize = yLen + uLen + vLen;
uint8_t* buffer = new uint8_t[totalSize];
// Copy Y plane
memcpy(buffer, yPixel, yLen);
// Copy U plane
memcpy(buffer + yLen, uPixel, uLen);
// Copy V plane
memcpy(buffer + yLen + uLen, vPixel, vLen);
// Process frame in callback
if (manager->mRunning && manager->mFrameCallback) {
manager->mFrameCallback(buffer, totalSize, width, height, timestamp);
}
delete[] buffer;
AImage_delete(image);
}

@@ -0,0 +1,49 @@
#ifndef CAMERA_MANAGER_H
#define CAMERA_MANAGER_H
#include <camera/NdkCameraManager.h>
#include <camera/NdkCameraDevice.h>
#include <camera/NdkCameraMetadata.h>
#include <media/NdkImageReader.h>
#include <functional>
#include <string>
#include <vector>
#include <mutex>
#include <condition_variable>
class CameraManager {
public:
using FrameCallback = std::function<void(uint8_t*, size_t, int32_t, int32_t, int64_t)>;
CameraManager();
~CameraManager();
bool initialize();
bool openCamera(const char* cameraId);
bool startCapture(int width, int height, FrameCallback callback);
void stopCapture();
std::vector<std::string> getAvailableCameras();
private:
ACameraManager* mCameraManager = nullptr;
ACameraDevice* mCameraDevice = nullptr;
ACameraCaptureSession* mCaptureSession = nullptr;
ACameraOutputTarget* mOutputTarget = nullptr;
ACaptureRequest* mCaptureRequest = nullptr;
AImageReader* mImageReader = nullptr;
FrameCallback mFrameCallback;
std::mutex mMutex;
std::condition_variable mCondVar;
bool mRunning = false;
static void onDeviceDisconnected(void* context, ACameraDevice* device);
static void onDeviceError(void* context, ACameraDevice* device, int error);
static void onSessionClosed(void* context, ACameraCaptureSession* session);
static void onSessionReady(void* context, ACameraCaptureSession* session);
static void onSessionActive(void* context, ACameraCaptureSession* session);
static void imageCallback(void* context, AImageReader* reader);
};
#endif // CAMERA_MANAGER_H

@@ -0,0 +1,165 @@
#include "encoder_manager.h"
#include <android/log.h>
#include <media/NdkMediaCodec.h>
#define LOG_TAG "EncoderManager"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
EncoderManager::EncoderManager() {}
EncoderManager::~EncoderManager() {
stop();
}
bool EncoderManager::initialize(int width, int height, int bitrate, int frameRate, EncodedFrameCallback callback) {
mWidth = width;
mHeight = height;
mBitrate = bitrate;
mFrameRate = frameRate;
mCallback = callback;
// Create H.264 encoder
mCodec = AMediaCodec_createEncoderByType("video/avc");
if (!mCodec) {
LOGE("Failed to create H.264 encoder");
return false;
}
// Configure encoder
AMediaFormat* format = AMediaFormat_new();
AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/avc");
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_BIT_RATE, bitrate);
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_FRAME_RATE, frameRate);
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_I_FRAME_INTERVAL, 1); // Key frame every second
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_COLOR_FORMAT, 21); // COLOR_FormatYUV420SemiPlanar
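// Note: hard-coding 21 (NV12) is not guaranteed on every device; COLOR_FormatYUV420Flexible
// (2135033992) is the more portable choice, at the cost of querying the actual input layout.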
media_status_t status = AMediaCodec_configure(
mCodec, format, nullptr, nullptr, AMEDIACODEC_CONFIGURE_FLAG_ENCODE);
AMediaFormat_delete(format);
if (status != AMEDIA_OK) {
LOGE("Failed to configure encoder: %d", status);
AMediaCodec_delete(mCodec);
mCodec = nullptr;
return false;
}
// Start encoder
status = AMediaCodec_start(mCodec);
if (status != AMEDIA_OK) {
LOGE("Failed to start encoder: %d", status);
AMediaCodec_delete(mCodec);
mCodec = nullptr;
return false;
}
mRunning = true;
// Start output processing thread
mOutputThread = std::thread(&EncoderManager::outputLoop, this);
return true;
}
bool EncoderManager::encode(uint8_t* yuvData, size_t dataSize, int64_t presentationTimeUs) {
if (!mRunning || !mCodec) {
return false;
}
// Get input buffer index with timeout
ssize_t inputBufferIndex = AMediaCodec_dequeueInputBuffer(mCodec, 10000);
if (inputBufferIndex < 0) {
LOGE("Failed to get input buffer: %zd", inputBufferIndex);
return false;
}
// Get input buffer and its size
size_t inputBufferSize;
uint8_t* inputBuffer = AMediaCodec_getInputBuffer(mCodec, inputBufferIndex, &inputBufferSize);
if (!inputBuffer) {
LOGE("Failed to get input buffer pointer");
return false;
}
// Make sure our data fits in the buffer
size_t toCopy = std::min(dataSize, inputBufferSize);
memcpy(inputBuffer, yuvData, toCopy);
// Queue the input buffer with timestamp
media_status_t status = AMediaCodec_queueInputBuffer(
mCodec, inputBufferIndex, 0, toCopy, presentationTimeUs, 0);
if (status != AMEDIA_OK) {
LOGE("Failed to queue input buffer: %d", status);
return false;
}
return true;
}
void EncoderManager::stop() {
if (mRunning) {
mRunning = false;
if (mOutputThread.joinable()) {
mOutputThread.join();
}
if (mCodec) {
AMediaCodec_stop(mCodec);
AMediaCodec_delete(mCodec);
mCodec = nullptr;
}
}
}
void EncoderManager::outputLoop() {
AMediaCodecBufferInfo bufferInfo;
while (mRunning) {
// Dequeue output buffer with timeout
ssize_t outputBufferIndex = AMediaCodec_dequeueOutputBuffer(mCodec, &bufferInfo, 10000);
if (outputBufferIndex >= 0) {
// Get output buffer
size_t outputBufferSize;
uint8_t* outputBuffer = AMediaCodec_getOutputBuffer(mCodec, outputBufferIndex, &outputBufferSize);
if (outputBuffer && bufferInfo.size > 0 && mCallback) {
// Determine if it's a key frame
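// (bit 0x1 mirrors MediaCodec.BUFFER_FLAG_KEY_FRAME on the Java side; buffers
// flagged 0x2 are codec-config data carrying SPS/PPS rather than a frame)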
bool isKeyFrame = (bufferInfo.flags & 1) != 0;
// Copy encoded data to a new buffer
uint8_t* data = new uint8_t[bufferInfo.size];
memcpy(data, outputBuffer + bufferInfo.offset, bufferInfo.size);
// Prepare frame and send via callback
EncodedFrame frame;
frame.data = data;
frame.size = bufferInfo.size;
frame.presentationTimeUs = bufferInfo.presentationTimeUs;
frame.isKeyFrame = isKeyFrame;
mCallback(frame);
delete[] data;
}
// Release the output buffer
AMediaCodec_releaseOutputBuffer(mCodec, outputBufferIndex, false);
} else if (outputBufferIndex == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
// Format changed - could extract codec specific data here if needed
AMediaFormat* format = AMediaCodec_getOutputFormat(mCodec);
AMediaFormat_delete(format);
} else if (outputBufferIndex == AMEDIACODEC_INFO_TRY_AGAIN_LATER) {
// No output available yet - just continue
std::this_thread::sleep_for(std::chrono::milliseconds(5));
} else {
LOGE("Unexpected output buffer index: %zd", outputBufferIndex);
}
}
}

@@ -0,0 +1,44 @@
#ifndef ENCODER_MANAGER_H
#define ENCODER_MANAGER_H
#include <media/NdkMediaCodec.h>
#include <functional>
#include <thread>
#include <mutex>
#include <condition_variable>
#include <queue>
#include <atomic>
struct EncodedFrame {
uint8_t* data;
size_t size;
int64_t presentationTimeUs;
bool isKeyFrame;
};
class EncoderManager {
public:
using EncodedFrameCallback = std::function<void(const EncodedFrame&)>;
EncoderManager();
~EncoderManager();
bool initialize(int width, int height, int bitrate, int frameRate, EncodedFrameCallback callback);
bool encode(uint8_t* yuvData, size_t dataSize, int64_t presentationTimeUs);
void stop();
private:
AMediaCodec* mCodec = nullptr;
int mWidth = 0;
int mHeight = 0;
int mBitrate = 0;
int mFrameRate = 0;
EncodedFrameCallback mCallback;
std::atomic<bool> mRunning{false};
std::thread mOutputThread;
void outputLoop();
};
#endif // ENCODER_MANAGER_H

@@ -0,0 +1,280 @@
#include <jni.h>
#include <string>
#include <thread>
#include <chrono>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <pthread.h>
#include <semaphore.h>
#include <android/log.h>
// #define USING_MULTI_CAMS
#ifdef USING_FFMPEG
extern "C" {
#include <libavformat/avformat.h>
}
#endif
#include "camera_manager.h"
#include "encoder_manager.h"
#include "rtsp_streamer.h"
#include "Utils.h"
#define TAG "CAM2RTSP"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, TAG, __VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG, TAG, __VA_ARGS__)
#define LOGW(...) __android_log_print(ANDROID_LOG_WARN, TAG, __VA_ARGS__)
// Global variables
static std::unique_ptr<CameraManager> gCameraManager;
static std::unique_ptr<EncoderManager> gEncoderManager;
static std::unique_ptr<RtspStreamer> gRtspStreamer;
static bool gIsRunning = false;
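// NOTE: gIsRunning is read and written from JNI calls on different threads;
// std::atomic<bool> would make the start/stop handshake race-free.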
static int gWidth = 1280;
static int gHeight = 720;
static int gFps = 30;
static int gBitrate = 2000000; // 2 Mbps
static std::string gRtspUrl;
// Frame processing callback
void onFrameEncoded(const EncodedFrame& frame) {
if (gRtspStreamer) {
gRtspStreamer->sendFrame(frame);
}
}
// Camera frame callback
void onCameraFrame(uint8_t* data, size_t size, int32_t width, int32_t height, int64_t timestamp) {
if (gEncoderManager) {
// YUV420 conversion to NV12 might be needed depending on camera format
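// A fresh heap buffer per frame keeps this simple; at 15-30 fps a reusable
// scratch buffer would avoid the repeated new/delete.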
uint8_t* nv12Data = new uint8_t[size];
utils::YUV420ToNV12(data, nv12Data, width, height);
gEncoderManager->encode(nv12Data, size, timestamp);
delete[] nv12Data;
}
}
void ffmpeg_log_callback(void *ptr, int level, const char *fmt, va_list vl) {
// Map FFmpeg log levels to Android log levels
int android_log_level;
switch (level) {
case AV_LOG_PANIC:
case AV_LOG_FATAL:
android_log_level = ANDROID_LOG_FATAL;
break;
case AV_LOG_ERROR:
android_log_level = ANDROID_LOG_ERROR;
break;
case AV_LOG_WARNING:
android_log_level = ANDROID_LOG_WARN;
break;
case AV_LOG_INFO:
android_log_level = ANDROID_LOG_INFO;
break;
case AV_LOG_VERBOSE:
android_log_level = ANDROID_LOG_VERBOSE;
break;
case AV_LOG_DEBUG:
case AV_LOG_TRACE:
android_log_level = ANDROID_LOG_DEBUG;
break;
default:
android_log_level = ANDROID_LOG_INFO;
break;
}
// Format the log message
char log_message[1024];
vsnprintf(log_message, sizeof(log_message), fmt, vl);
if (level < AV_LOG_VERBOSE) // forward PANIC..INFO; skip VERBOSE/DEBUG/TRACE noise
{
// Send the log message to logcat
__android_log_print(android_log_level, "FFmpeg", "%s", log_message);
}
}
jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
JNIEnv* env = NULL;
jint result = -1;
// Register handlers in JNI_OnLoad or another initialization function
#if 0
signal(SIGSEGV, sighandler);
#endif
#if defined(JNI_VERSION_1_6)
if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_6) == JNI_OK)
{
result = JNI_VERSION_1_6;
}
#endif
#if defined(JNI_VERSION_1_4)
if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_4) == JNI_OK)
{
result = JNI_VERSION_1_4;
}
#endif
#if defined(JNI_VERSION_1_2)
if (result==-1 && vm->GetEnv((void**)&env, JNI_VERSION_1_2) == JNI_OK)
{
result = JNI_VERSION_1_2;
}
#endif
if (result == -1 || env == NULL)
{
// JNI_OnLoad must return a JNI version on success; JNI_ERR signals failure
return JNI_ERR;
}
// curl_global_init(CURL_GLOBAL_ALL);
#ifdef USING_FFMPEG
// Initialize FFmpeg
#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
av_register_all();
#endif
avformat_network_init();
#ifndef NDEBUG
// Set the custom log callback
av_log_set_level(AV_LOG_INFO);
av_log_set_callback(ffmpeg_log_callback);
// av_log(NULL, AV_LOG_INFO, "Testing FFmpeg logging from JNI_OnLoad");
#endif
#endif
return result;
}
JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved)
{
// curl_global_cleanup();
#ifdef USING_FFMPEG
#if LIBAVFORMAT_VERSION_INT < AV_VERSION_INT(58, 9, 100)
av_unregister_all();
#endif
avformat_network_deinit();
#endif
}
extern "C" JNIEXPORT jint JNICALL
Java_com_xypower_dblstreams_MainActivity_startPlayback(
JNIEnv* env, jobject pThis) {
if (gIsRunning) {
LOGI("Streaming already running");
return JNI_TRUE;
}
// Get RTSP URL
gRtspUrl = "rtsp://61.169.135.146:1554/live/11";
gWidth = 720;
gHeight = 480;
gFps = 15;
gBitrate = 2048*1024;
LOGI("Starting streaming: %s (%dx%d @ %dfps)", gRtspUrl.c_str(), gWidth, gHeight, gFps);
// Initialize RTSP streamer
gRtspStreamer = std::make_unique<RtspStreamer>();
if (!gRtspStreamer->initialize(gRtspUrl, gWidth, gHeight, gFps)) {
LOGE("Failed to initialize RTSP streamer");
return JNI_FALSE;
}
// Initialize encoder
gEncoderManager = std::make_unique<EncoderManager>();
if (!gEncoderManager->initialize(gWidth, gHeight, gBitrate, gFps, onFrameEncoded)) {
LOGE("Failed to initialize encoder");
return JNI_FALSE;
}
// Initialize camera
gCameraManager = std::make_unique<CameraManager>();
if (!gCameraManager->initialize()) {
LOGE("Failed to initialize camera");
return JNI_FALSE;
}
// Get available cameras
auto cameras = gCameraManager->getAvailableCameras();
if (cameras.empty()) {
LOGE("No cameras available");
return JNI_FALSE;
}
// Open first available camera (usually back camera)
if (!gCameraManager->openCamera(cameras[0].c_str())) {
LOGE("Failed to open camera");
return JNI_FALSE;
}
// Start camera capture
if (!gCameraManager->startCapture(gWidth, gHeight, onCameraFrame)) {
LOGE("Failed to start camera capture");
return JNI_FALSE;
}
gIsRunning = true;
return JNI_TRUE;
}
extern "C" JNIEXPORT jint JNICALL
Java_com_xypower_dblstreams_MainActivity_startRtmpPlayback(
JNIEnv* env, jobject pThis) {
return 0;
}
extern "C" JNIEXPORT void JNICALL
Java_com_xypower_dblstreams_MainActivity_stopPlayback(
JNIEnv* env, jobject pThis) {
if (!gIsRunning) {
return;
}
LOGI("Stopping streaming");
// Stop and clean up camera
if (gCameraManager) {
gCameraManager->stopCapture();
gCameraManager.reset();
}
// Stop and clean up encoder
if (gEncoderManager) {
gEncoderManager->stop();
gEncoderManager.reset();
}
// Stop and clean up RTSP streamer
if (gRtspStreamer) {
gRtspStreamer->stop();
gRtspStreamer.reset();
}
gIsRunning = false;
}

@@ -0,0 +1,390 @@
#include "rtmp_streamer.h"
#include <string.h>
#include <stdlib.h>
#include <stdio.h>
#include <android/log.h>
// FFmpeg 4.4.5 includes
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/opt.h>
#include <libavutil/time.h>
}
// Verify we're using FFmpeg 4.4.5
#if LIBAVFORMAT_VERSION_MAJOR != 58 || LIBAVFORMAT_VERSION_MINOR != 76
#warning "This code is optimized for FFmpeg 4.4.5 (libavformat 58.76.100)"
#endif
void debug_save_stream(const uint8_t* data, size_t size, bool is_keyframe, bool is_converted) {
static FILE* raw_file = NULL;
static FILE* converted_file = NULL;
if (!raw_file) raw_file = fopen("/sdcard/rtmp_raw.h264", "wb");
if (!converted_file) converted_file = fopen("/sdcard/rtmp_converted.h264", "wb");
FILE* target = is_converted ? converted_file : raw_file;
if (target) {
if (is_keyframe) {
uint8_t marker[4] = {0, 0, 0, 0}; // Visual marker for keyframes
fwrite(marker, 1, 4, target);
}
fwrite(data, 1, size, target);
fflush(target);
}
}
// Convert Annex B (start-code delimited) H.264 to AVCC (length-prefixed) format
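// Worked example: Annex B "00 00 00 01 <NAL bytes>" becomes AVCC
// "<4-byte big-endian NAL length> <NAL bytes>"; e.g. a 5-byte NAL
//   00 00 00 01 65 88 84 00 10  ->  00 00 00 05 65 88 84 00 10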
bool convert_annexb_to_avcc(const uint8_t* annexb_data, size_t annexb_size,
uint8_t** avcc_data, size_t* avcc_size) {
// Count NAL units and calculate required size
size_t total_size = 0;
int nal_count = 0;
for (size_t i = 0; i < annexb_size - 3; i++) {
// Find start code
if ((annexb_data[i] == 0 && annexb_data[i+1] == 0 && annexb_data[i+2] == 0 && annexb_data[i+3] == 1) ||
(annexb_data[i] == 0 && annexb_data[i+1] == 0 && annexb_data[i+2] == 1)) {
nal_count++;
}
}
// Allocate output buffer (estimate size)
*avcc_data = (uint8_t*)malloc(annexb_size + nal_count*4);
uint8_t* out = *avcc_data;
*avcc_size = 0;
// Convert each NAL unit
for (size_t i = 0; i < annexb_size;) {
// Find start code
if ((i+3 < annexb_size && annexb_data[i] == 0 && annexb_data[i+1] == 0 &&
annexb_data[i+2] == 0 && annexb_data[i+3] == 1) ||
(i+2 < annexb_size && annexb_data[i] == 0 && annexb_data[i+1] == 0 &&
annexb_data[i+2] == 1)) {
int start_code_size = (annexb_data[i+2] == 1) ? 3 : 4;
i += start_code_size;
// Find next start code (or run to the end of the buffer)
size_t j = i;
bool foundNext = false;
while (j < annexb_size - 3) {
if ((annexb_data[j] == 0 && annexb_data[j+1] == 0 && annexb_data[j+2] == 1) ||
(annexb_data[j] == 0 && annexb_data[j+1] == 0 &&
annexb_data[j+2] == 0 && annexb_data[j+3] == 1)) {
foundNext = true;
break;
}
j++;
}
// Bug fix: when no further start code exists, the NAL unit extends to the end
// of the buffer; the original loop bound silently dropped the last 3 bytes.
if (!foundNext) j = annexb_size;
// NAL unit size
size_t nal_size = j - i;
// Write length prefix (4 bytes)
*out++ = (nal_size >> 24) & 0xff;
*out++ = (nal_size >> 16) & 0xff;
*out++ = (nal_size >> 8) & 0xff;
*out++ = nal_size & 0xff;
// Copy NAL unit
memcpy(out, annexb_data + i, nal_size);
out += nal_size;
*avcc_size += nal_size + 4;
i = j;
} else {
i++;
}
}
return true;
}
bool rtmp_streamer_init(RtspStreamer* streamer, const char* rtmpUrl,
int width, int height, int bitrate, int frameRate,
const uint8_t* sps, size_t spsSize,
const uint8_t* pps, size_t ppsSize) {
// Check muxer support: "rtmp" is a protocol, not a muxer, so probe for the
// FLV muxer that RTMP output is carried over
AVOutputFormat* outfmt = av_guess_format("flv", NULL, NULL);
if (!outfmt) {
__android_log_print(ANDROID_LOG_ERROR, "FFmpeg", "FLV muxer (required for RTMP) not available in this FFmpeg build!");
} else {
__android_log_print(ANDROID_LOG_INFO, "FFmpeg", "FLV muxer available");
}
// List available protocols
void *opaque = NULL;
const char *name = NULL;
__android_log_print(ANDROID_LOG_INFO, "FFmpeg", "Available output protocols:");
while ((name = avio_enum_protocols(&opaque, 1))) {
__android_log_print(ANDROID_LOG_INFO, "FFmpeg", " %s", name);
}
memset(streamer, 0, sizeof(RtspStreamer));
streamer->width = width;
streamer->height = height;
streamer->bitrate = bitrate;
streamer->frameRate = frameRate;
streamer->rtspUrl = strdup(rtmpUrl); // Keep the field name for now
// Allocate output format context
int ret = avformat_alloc_output_context2(&streamer->formatCtx, NULL, "flv", rtmpUrl);
if (ret < 0 || !streamer->formatCtx) {
fprintf(stderr, "Could not create output context, error: %d\n", ret);
return false;
}
// Find H.264 encoder
const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec) {
fprintf(stderr, "Could not find H.264 encoder\n");
return false;
}
// Create video stream
streamer->stream = avformat_new_stream(streamer->formatCtx, NULL);
if (!streamer->stream) {
fprintf(stderr, "Could not create video stream\n");
return false;
}
streamer->stream->id = streamer->formatCtx->nb_streams - 1;
// Initialize codec context
streamer->codecCtx = avcodec_alloc_context3(codec);
if (!streamer->codecCtx) {
fprintf(stderr, "Could not allocate codec context\n");
return false;
}
// Set codec parameters
streamer->codecCtx->codec_id = AV_CODEC_ID_H264;
streamer->codecCtx->codec_type = AVMEDIA_TYPE_VIDEO;
streamer->codecCtx->width = width;
streamer->codecCtx->height = height;
streamer->codecCtx->pix_fmt = AV_PIX_FMT_YUV420P;
streamer->codecCtx->bit_rate = bitrate;
streamer->codecCtx->time_base.num = 1;
streamer->codecCtx->time_base.den = frameRate;
// Use h264_passthrough as we'll receive pre-encoded H.264 data
streamer->codecCtx->codec_tag = 0;
// Create extradata with SPS/PPS in AVCC format (required by the FLV/RTMP muxer)
size_t extradata_size = 8 + spsSize + 3 + ppsSize;
streamer->codecCtx->extradata = (uint8_t*)av_malloc(extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
if (!streamer->codecCtx->extradata) {
fprintf(stderr, "Failed to allocate extradata\n");
return false;
}
memset(streamer->codecCtx->extradata, 0, extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
if (!sps || spsSize < 4) {
__android_log_print(ANDROID_LOG_ERROR, "RTMP", "Invalid SPS: %p, size: %zu", sps, spsSize);
return false;
}
if (!pps || ppsSize < 1) {
__android_log_print(ANDROID_LOG_ERROR, "RTMP", "Invalid PPS: %p, size: %zu", pps, ppsSize);
return false;
}
// Format extradata as AVCC (needed by RTMP)
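// AVCDecoderConfigurationRecord layout being built below (ISO/IEC 14496-15):
//   [1] version=1  [1] profile  [1] profile-compat  [1] level
//   [1] 0xFF (reserved + NAL length size - 1 = 3)  [1] 0xE1 (reserved + numSPS = 1)
//   [2] SPS length, SPS bytes, [1] numPPS = 1, [2] PPS length, PPS bytes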
uint8_t* p = streamer->codecCtx->extradata;
*p++ = 1; // version
*p++ = sps[1]; // profile
*p++ = sps[2]; // profile compat
*p++ = sps[3]; // level
*p++ = 0xff; // 6 bits reserved + 2 bits NAL size length - 1 (3)
*p++ = 0xe1; // 3 bits reserved + 5 bits number of SPS (1)
// SPS length and data
*p++ = (spsSize >> 8) & 0xff;
*p++ = spsSize & 0xff;
memcpy(p, sps, spsSize);
p += spsSize;
// Number of PPS
*p++ = 1;
// PPS length and data
*p++ = (ppsSize >> 8) & 0xff;
*p++ = ppsSize & 0xff;
memcpy(p, pps, ppsSize);
streamer->codecCtx->extradata_size = extradata_size;
// Copy parameters to stream
ret = avcodec_parameters_from_context(streamer->stream->codecpar, streamer->codecCtx);
if (ret < 0) {
fprintf(stderr, "Could not copy codec parameters to stream, error: %d\n", ret);
return false;
}
// Set stream timebase
streamer->stream->time_base = streamer->codecCtx->time_base;
// Some formats want stream headers to be separate
if (streamer->formatCtx->oformat->flags & AVFMT_GLOBALHEADER) {
streamer->codecCtx->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
// Muxer and I/O options tuned for low-latency RTMP publishing:
AVDictionary* options = NULL;
av_dict_set(&options, "flvflags", "no_duration_filesize", 0);
av_dict_set(&options, "tune", "zerolatency", 0);
av_dict_set(&options, "preset", "ultrafast", 0);
av_dict_set(&options, "fflags", "nobuffer", 0);
av_dict_set(&options, "rw_timeout", "8000000", 0); // 8 second timeout (ZLM default)
av_dict_set(&options, "buffer_size", "32768", 0); // Larger buffer for stability
av_dict_set(&options, "flush_packets", "1", 0); // Force packet flushing
// Dump the negotiated output format for debugging before opening the connection
av_dump_format(streamer->formatCtx, 0, rtmpUrl, 1);
// Open output URL
ret = avio_open2(&streamer->formatCtx->pb, rtmpUrl, AVIO_FLAG_WRITE, NULL, &options);
if (ret < 0) {
char err_buf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(ret, err_buf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Could not open output URL '%s', error: %s\n", rtmpUrl, err_buf);
return false;
}
// Write stream header
ret = avformat_write_header(streamer->formatCtx, &options);
if (ret < 0) {
char err_buf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(ret, err_buf, AV_ERROR_MAX_STRING_SIZE);
fprintf(stderr, "Error writing header: %s\n", err_buf);
return false;
}
// Allocate packet
streamer->packet = av_packet_alloc();
if (!streamer->packet) {
fprintf(stderr, "Could not allocate packet\n");
return false;
}
streamer->isConnected = true;
streamer->startTime = av_gettime();
streamer->frameCount = 0;
return true;
}
bool rtmp_streamer_send_h264(RtspStreamer* streamer, const uint8_t* data,
size_t dataLength, int64_t pts, bool isKeyFrame) {
if (!streamer || !streamer->isConnected || !data || dataLength == 0) {
return false;
}
static FILE* debug_file = NULL;
if (!debug_file) {
debug_file = fopen("/sdcard/com.xypower.mpapp/tmp/rtmp_debug.h264", "wb");
}
if (debug_file) {
fwrite(data, 1, dataLength, debug_file);
fflush(debug_file);
}
// Convert Annex B (start code) format to AVCC (length prefix) format
// RTMP requires AVCC format for H.264 data
uint8_t* avcc_data = NULL;
size_t avcc_size = 0;
if (!convert_annexb_to_avcc(data, dataLength, &avcc_data, &avcc_size)) {
__android_log_print(ANDROID_LOG_ERROR, "RTMP", "Failed to convert H.264 to AVCC format");
return false;
}
// Log frame info
__android_log_print(ANDROID_LOG_VERBOSE, "RTMP", "Sending frame: %zu bytes, keyframe: %d",
avcc_size, isKeyFrame ? 1 : 0);
// Reset packet
av_packet_unref(streamer->packet);
// Copy encoded data to packet buffer (use converted AVCC data)
uint8_t* buffer = (uint8_t*)av_malloc(avcc_size);
if (!buffer) {
__android_log_print(ANDROID_LOG_ERROR, "RTMP", "Failed to allocate memory for packet data");
free(avcc_data);
return false;
}
memcpy(buffer, avcc_data, avcc_size);
free(avcc_data); // Free the converted data
// Set up packet with AVCC formatted data
streamer->packet->data = buffer;
streamer->packet->size = avcc_size;
// RTMP timestamp handling
if (pts == 0) {
pts = av_gettime() - streamer->startTime;
}
// Convert to stream time_base for FLV/RTMP
// ZLMediaKit requires strictly monotonic timestamps
static int64_t last_dts = 0;
// Calculate timestamp in milliseconds
int64_t timestamp_ms = pts ? pts / 1000 : (av_gettime() - streamer->startTime) / 1000;
// Convert to stream timebase
int64_t ts_in_stream_tb = av_rescale_q(timestamp_ms,
(AVRational){1, 1000}, // millisecond timebase
streamer->stream->time_base);
// Ensure monotonically increasing timestamps
if (ts_in_stream_tb <= last_dts) {
ts_in_stream_tb = last_dts + 1;
}
last_dts = ts_in_stream_tb;
// Set both PTS and DTS
streamer->packet->pts = ts_in_stream_tb;
streamer->packet->dts = ts_in_stream_tb;
streamer->packet->duration = av_rescale_q(1,
(AVRational){1, streamer->frameRate},
streamer->stream->time_base);
// Set key frame flag - especially important for RTMP
if (isKeyFrame) {
streamer->packet->flags |= AV_PKT_FLAG_KEY;
__android_log_print(ANDROID_LOG_INFO, "RTMP",
"Sending keyframe (size: %zu, pts: %lld)", avcc_size, streamer->packet->pts);
}
streamer->packet->stream_index = streamer->stream->index;
// Write packet
int ret = av_interleaved_write_frame(streamer->formatCtx, streamer->packet);
av_free(buffer); // Free allocated buffer
if (ret < 0) {
char err_buf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(ret, err_buf, AV_ERROR_MAX_STRING_SIZE);
__android_log_print(ANDROID_LOG_ERROR, "RTMP", "Error writing frame: %s", err_buf);
return false;
}
streamer->frameCount++;
return true;
}

@@ -0,0 +1,48 @@
#ifndef RTSP_STREAMER_H
#define RTSP_STREAMER_H
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/time.h>
}
#include <string>
#include <thread>
#include <mutex>
#include <condition_variable>
#include <queue>
#include <atomic>
#include "camera_manager.h"
class RtspStreamer {
public:
RtspStreamer();
~RtspStreamer();
bool initialize(const std::string& url, int width, int height, int fps);
bool sendFrame(const EncodedFrame& frame);
void stop();
private:
std::string mUrl;
AVFormatContext* mFormatCtx = nullptr;
AVStream* mVideoStream = nullptr;
AVCodecContext* mCodecCtx = nullptr;
AVPacket* mPacket = nullptr;
int64_t mStartTime = 0;
std::atomic<bool> mRunning{false};
std::queue<EncodedFrame> mFrameQueue;
std::mutex mQueueMutex;
std::condition_variable mQueueCond;
std::thread mStreamThread;
void extractH264Parameters(const uint8_t* data, size_t size, uint8_t** sps, size_t* spsSize, uint8_t** pps, size_t* ppsSize);
void streamLoop();
};
#endif // RTSP_STREAMER_H

@@ -0,0 +1,265 @@
#include "rtsp_streamer.h"
#include <android/log.h>
#include <cstring>
#define LOG_TAG "RtspStreamer"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// Include the encoder_manager.h to get the EncodedFrame structure
#include "encoder_manager.h"
// Define key frame flag constant
#define BUFFER_FLAG_KEY_FRAME 1
RtspStreamer::RtspStreamer() : mPacket(nullptr), mFormatCtx(nullptr),
mVideoStream(nullptr), mCodecCtx(nullptr),
mStartTime(0), mRunning(false) {}
RtspStreamer::~RtspStreamer() {
stop();
}
bool RtspStreamer::initialize(const std::string& url, int width, int height, int fps) {
mUrl = url;
// Initialize FFmpeg
avformat_network_init();
// Allocate format context
int ret = avformat_alloc_output_context2(&mFormatCtx, nullptr, "rtsp", url.c_str());
if (ret < 0 || !mFormatCtx) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
LOGE("Could not allocate output format context: %s", errbuf);
return false;
}
// Set RTSP options. rtsp_transport is a muxer option, not metadata, so it is
// passed via the opts dictionary to avformat_write_header() below (TCP for reliability).
// AVFMT_FLAG_NONBLOCK belongs on the context flags, not on the shared oformat.
mFormatCtx->flags |= AVFMT_FLAG_NONBLOCK;
// Find the H.264 encoder
const AVCodec* codec = avcodec_find_encoder(AV_CODEC_ID_H264);
if (!codec) {
LOGE("Could not find H.264 encoder");
return false;
}
// Create video stream
mVideoStream = avformat_new_stream(mFormatCtx, nullptr);
if (!mVideoStream) {
LOGE("Could not create video stream");
return false;
}
mVideoStream->id = mFormatCtx->nb_streams - 1;
// Set stream parameters
AVCodecParameters* codecpar = mVideoStream->codecpar;
codecpar->codec_id = AV_CODEC_ID_H264;
codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
codecpar->width = width;
codecpar->height = height;
codecpar->format = AV_PIX_FMT_YUV420P;
codecpar->bit_rate = 2000000; // 2 Mbps
// Stream timebase (90kHz is standard for H.264 in RTSP)
mVideoStream->time_base = (AVRational){1, 90000};
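// e.g. at 15 fps one frame spans 90000 / 15 = 6000 ticks in this timebase.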
// Open output URL
if (!(mFormatCtx->oformat->flags & AVFMT_NOFILE)) {
ret = avio_open(&mFormatCtx->pb, url.c_str(), AVIO_FLAG_WRITE);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
LOGE("Could not open output URL: %s, error: %s", url.c_str(), errbuf);
return false;
}
}
// Write stream header
AVDictionary* opts = nullptr;
av_dict_set(&opts, "rtsp_transport", "tcp", 0);
ret = avformat_write_header(mFormatCtx, &opts);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
LOGE("Error writing header: %s", errbuf);
return false;
}
av_dict_free(&opts);
// Allocate packet
mPacket = av_packet_alloc();
if (!mPacket) {
LOGE("Could not allocate packet");
return false;
}
mRunning = true;
mStartTime = av_gettime();
// Start streaming thread
mStreamThread = std::thread(&RtspStreamer::streamLoop, this);
LOGI("RTSP streamer initialized successfully to %s", url.c_str());
return true;
}
bool RtspStreamer::sendFrame(const EncodedFrame& frame) {
if (!mRunning) {
return false;
}
// Make a copy of the frame
EncodedFrame frameCopy;
frameCopy.size = frame.size;
frameCopy.presentationTimeUs = frame.presentationTimeUs;
frameCopy.isKeyFrame = frame.isKeyFrame;
// Copy the data
frameCopy.data = new uint8_t[frame.size];
memcpy(frameCopy.data, frame.data, frame.size);
// Add the frame to queue
{
std::lock_guard<std::mutex> lock(mQueueMutex);
mFrameQueue.push(frameCopy);
}
mQueueCond.notify_one();
return true;
}
void RtspStreamer::stop() {
if (mRunning) {
mRunning = false;
// Wake up streaming thread
mQueueCond.notify_all();
if (mStreamThread.joinable()) {
mStreamThread.join();
}
// Clean up frames in queue
{
std::lock_guard<std::mutex> lock(mQueueMutex);
while (!mFrameQueue.empty()) {
EncodedFrame& frame = mFrameQueue.front();
delete[] frame.data;
mFrameQueue.pop();
}
}
// Write trailer and close
if (mFormatCtx) {
if (mFormatCtx->pb) {
av_write_trailer(mFormatCtx);
}
if (!(mFormatCtx->oformat->flags & AVFMT_NOFILE) && mFormatCtx->pb) {
avio_close(mFormatCtx->pb);
}
avformat_free_context(mFormatCtx);
mFormatCtx = nullptr;
}
if (mCodecCtx) {
avcodec_free_context(&mCodecCtx);
}
if (mPacket) {
av_packet_free(&mPacket);
}
LOGI("RTSP streamer stopped");
}
}
void RtspStreamer::streamLoop() {
bool firstFrame = true;
int64_t firstPts = 0;
while (mRunning) {
EncodedFrame frame;
bool hasFrame = false;
// Get frame from queue
{
std::unique_lock<std::mutex> lock(mQueueMutex);
if (mFrameQueue.empty()) {
// Wait for new frame or stop signal
mQueueCond.wait_for(lock, std::chrono::milliseconds(100));
continue;
}
frame = mFrameQueue.front();
mFrameQueue.pop();
hasFrame = true;
}
if (hasFrame) {
// Reset the packet
av_packet_unref(mPacket);
// Save first timestamp for offset calculation
if (firstFrame) {
firstPts = frame.presentationTimeUs;
firstFrame = false;
}
// Create a copy of the frame data that FFmpeg will manage
uint8_t* buffer = (uint8_t*)av_malloc(frame.size);
if (!buffer) {
LOGE("Failed to allocate buffer for frame");
delete[] frame.data; // Free our copy
continue;
}
// Copy frame data to the FFmpeg-managed buffer
memcpy(buffer, frame.data, frame.size);
// We can now free our copy of the data
delete[] frame.data;
frame.data = nullptr; // Avoid accidental double-delete
// Let FFmpeg manage the buffer
int ret = av_packet_from_data(mPacket, buffer, frame.size);
if (ret < 0) {
LOGE("Failed to create packet from data: %d", ret);
av_free(buffer); // Free FFmpeg buffer on error
continue;
}
// Now mPacket owns the buffer, we don't need to free it manually
// Offset timestamp by first frame for proper timing
int64_t pts = frame.presentationTimeUs - firstPts;
// Convert to stream timebase (90kHz)
pts = av_rescale_q(pts, (AVRational){1, 1000000}, mVideoStream->time_base);
// Set packet properties
mPacket->pts = pts;
mPacket->dts = pts;
mPacket->duration = 0;
mPacket->flags = frame.isKeyFrame ? AV_PKT_FLAG_KEY : 0;
mPacket->stream_index = mVideoStream->index;
// Write packet
ret = av_interleaved_write_frame(mFormatCtx, mPacket);
if (ret < 0) {
char errbuf[AV_ERROR_MAX_STRING_SIZE] = {0};
av_strerror(ret, errbuf, AV_ERROR_MAX_STRING_SIZE);
LOGE("Error writing frame: %d (%s)", ret, errbuf);
// Handle reconnection logic as before...
}
// We don't need to delete frame.data here anymore - it's already been freed above
// and ownership of the buffer has been transferred to FFmpeg
}
}
}

@@ -0,0 +1,48 @@
#ifndef RTSP_STREAMER_H
#define RTSP_STREAMER_H
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/time.h>
}
#include <string>
#include <thread>
#include <mutex>
#include <condition_variable>
#include <queue>
#include <atomic>
#include "encoder_manager.h"
class RtspStreamer {
public:
RtspStreamer();
~RtspStreamer();
bool initialize(const std::string& url, int width, int height, int fps);
bool sendFrame(const EncodedFrame& frame);
void stop();
private:
std::string mUrl;
AVFormatContext* mFormatCtx = nullptr;
AVStream* mVideoStream = nullptr;
AVCodecContext* mCodecCtx = nullptr;
AVPacket* mPacket = nullptr;
int64_t mStartTime = 0;
std::atomic<bool> mRunning{false};
std::queue<EncodedFrame> mFrameQueue;
std::mutex mQueueMutex;
std::condition_variable mQueueCond;
std::thread mStreamThread;
void extractH264Parameters(const uint8_t* data, size_t size, uint8_t** sps, size_t* spsSize, uint8_t** pps, size_t* ppsSize);
void streamLoop();
};
#endif // RTSP_STREAMER_H

@@ -0,0 +1,69 @@
package com.xypower.dblstreams;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import androidx.appcompat.app.AppCompatActivity;
public class MainActivity extends AppCompatActivity {
static {
// Load the native library (libdblstreams.so)
System.loadLibrary("dblstreams");
}
private static final String TAG = "DualStreamingActivity";
private static final int REQUEST_CAMERA_PERMISSION = 200;
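// NOTE: CAMERA and RECORD_AUDIO are dangerous permissions; on API 23+ they must
// be granted at runtime before startPlayback() can open the camera. A minimal
// sketch (the constant above is reused; handle the result in onRequestPermissionsResult):
//   ActivityCompat.requestPermissions(this,
//       new String[]{Manifest.permission.CAMERA, Manifest.permission.RECORD_AUDIO},
//       REQUEST_CAMERA_PERMISSION);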
private native int startPlayback();
private native int startRtmpPlayback();
private native void stopPlayback();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Button btnStop = (Button)findViewById(R.id.btnStopStreaming);
btnStop.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
stopPlayback();
}
});
// Start streaming on a background thread
Thread th = new Thread(new Runnable() {
@Override
public void run() {
startPlayback();
}
});
th.start();
}
@Override
protected void onResume() {
super.onResume();
// The camera is only opened when streaming starts, so nothing extra is needed here
}
@Override
protected void onPause() {
// Stop streaming when the Activity is paused
stopPlayback();
super.onPause();
}
@Override
protected void onDestroy() {
// Make sure native resources are released
stopPlayback();
super.onDestroy();
}
}

@@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

@@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

@@ -0,0 +1,37 @@
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:gravity="center">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="双路推流"
android:textSize="24sp"
android:layout_marginBottom="30dp"/>
<Button
android:id="@+id/btnStartStreaming"
android:layout_width="200dp"
android:layout_height="wrap_content"
android:text="开始推流"
android:layout_marginBottom="20dp"/>
<Button
android:id="@+id/btnStopStreaming"
android:layout_width="200dp"
android:layout_height="wrap_content"
android:text="停止推流"
android:enabled="false"/>
<TextView
android:id="@+id/tvStatus"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="30dp"
android:text="就绪"
android:textSize="16sp"/>
</LinearLayout>

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
<monochrome android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

(10 binary files not shown: launcher icon images across mipmap densities, ranging from 982 B to 7.6 KiB.)
@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.DblStreams" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_200</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/black</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_200</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>

@ -0,0 +1,3 @@
<resources>
<string name="app_name">DblStreams</string>
</resources>

@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.DblStreams" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_500</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/white</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_700</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?><!--
Sample backup rules file; uncomment and customize as necessary.
See https://developer.android.com/guide/topics/data/autobackup
for details.
Note: This file is ignored for devices older than API 31
See https://developer.android.com/about/versions/12/backup-restore
-->
<full-backup-content>
<!--
<include domain="sharedpref" path="."/>
<exclude domain="sharedpref" path="device.xml"/>
-->
</full-backup-content>

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?><!--
Sample data extraction rules file; uncomment and customize as necessary.
See https://developer.android.com/about/versions/12/backup-restore#xml-changes
for details.
-->
<data-extraction-rules>
<cloud-backup>
<!-- TODO: Use <include> and <exclude> to control what is backed up.
<include .../>
<exclude .../>
-->
</cloud-backup>
<!--
<device-transfer>
<include .../>
<exclude .../>
</device-transfer>
-->
</data-extraction-rules>

@ -0,0 +1,17 @@
package com.xypower.dblstreams;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}
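These are host-side unit tests that run on the development machine's JVM (via gradlew test); the instrumented tests need a connected device or emulator.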

@ -0,0 +1,4 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
plugins {
id 'com.android.application' version '8.1.4' apply false
}
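Worth noting: AGP 8.1.x requires at least Gradle 8.0 (and a JDK 17 runtime for Gradle itself), which lines up with the Gradle 8.0 wrapper pinned further down.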

@ -0,0 +1,23 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
hdrplusroot=D:/Workspace/deps/hdrplus_libs
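Note that hdrplusroot is a machine-local absolute path (a Windows D: drive here); anyone else building the project has to repoint it at their own copy of the hdrplus dependency tree.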

Binary file not shown.

@ -0,0 +1,6 @@
#Thu Mar 27 12:12:29 CST 2025
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://mirrors.cloud.tencent.com/gradle/gradle-8.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
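The wrapper fetches Gradle 8.0 from a Tencent Cloud mirror; if that mirror is unreachable, the upstream equivalent is https://services.gradle.org/distributions/gradle-8.0-bin.zip.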

185
gradlew vendored

@ -0,0 +1,185 @@
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"

89
gradlew.bat vendored

@ -0,0 +1,89 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

@ -0,0 +1,17 @@
pluginManagement {
repositories {
google()
mavenCentral()
gradlePluginPortal()
}
}
dependencyResolutionManagement {
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
google()
mavenCentral()
}
}
rootProject.name = "DblStreams"
include ':app'
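Note that RepositoriesMode.FAIL_ON_PROJECT_REPOS makes the build fail if any module declares its own repositories block, so every repository must be listed here in settings.gradle.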