Compare commits

...

No commits in common. 'main' and 'vulkan' have entirely different histories.
main ... vulkan

2
.gitignore vendored

@ -1,7 +1,9 @@
# ---> Android
# Gradle files
.gradle/
.google/
build/
app/release/
# Local configuration file (sdk path, etc)
local.properties

@ -1,64 +0,0 @@
// Top-level buildscript: where to find, and which version of, the Android Gradle plugin.
buildscript {
repositories {
// NOTE(review): jcenter() is read-only/sunset; google() + mavenCentral() below
// should already resolve everything — consider removing it.
jcenter()
google()
mavenCentral()
}
dependencies {
// Android Gradle plugin providing the 'com.android.application' plugin applied below.
classpath 'com.android.tools.build:gradle:7.2.1'
}
}
apply plugin: 'com.android.application'
// Repositories used to resolve the app's own (implementation) dependencies.
repositories {
jcenter()
google()
mavenCentral()
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.0.0'
implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
}
// The sample build uses multiple directories to
// keep boilerplate and common code separate from
// the main sample code.
List<String> dirs = [
'main', // main sample code; look here for the interesting stuff.
'common', // components that are reused by multiple samples
'template'] // boilerplate code that is generated by the sample template process
android {
compileSdk 33
defaultConfig {
minSdkVersion 24
// Lint suppression: targetSdkVersion 27 is below Play's minimum on purpose —
// presumably to keep requestLegacyExternalStorage effective (manifest sets it);
// TODO confirm before raising the target.
//noinspection ExpiredTargetSdkVersion
targetSdkVersion 27
}
compileOptions {
// Java 8 language features (lambdas, method references) are available to app sources.
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
sourceSets {
main {
// Merge each configured directory's java/ and res/ trees into the main source set.
dirs.each { dir ->
java.srcDirs "src/${dir}/java"
res.srcDirs "src/${dir}/res"
}
}
// Instrumentation tests live under tests/ instead of the default androidTest/ root.
androidTest.setRoot('tests')
androidTest.java.srcDirs = ['tests/src']
}
ndkVersion '26.1.10909125'
}

@ -1,15 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<issues format="6" by="lint 7.2.1" type="baseline" client="gradle" dependencies="false" name="AGP (7.2.1)" variant="fatal" version="7.2.1">
<issue
id="ExpiredTargetSdkVersion"
message="Google Play requires that apps target API level 30 or higher.&#xA;"
errorLine1=" targetSdkVersion 27"
errorLine2=" ~~~~~~~~~~~~~~~~~~~">
<location
file="build.gradle"
line="40"
column="9"/>
</issue>
</issues>

@ -1,20 +0,0 @@
{
"version": 3,
"artifactType": {
"type": "APK",
"kind": "Directory"
},
"applicationId": "com.xypower.mppreview",
"variantName": "release",
"elements": [
{
"type": "SINGLE",
"filters": [],
"attributes": [],
"versionCode": 1,
"versionName": "1.0",
"outputFile": "Application-release.apk"
}
],
"elementType": "File"
}

@ -1,35 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.xypower.mppreview"
android:versionCode="1"
android:versionName="1.0">
<!-- Min/target SDK versions (<uses-sdk>) managed by build.gradle -->
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<!-- A camera with RAW capability is required to use this application -->
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.raw" />
<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:requestLegacyExternalStorage="true"
android:theme="@style/MaterialTheme">
<activity
android:name=".MainActivity"
android:label="@string/app_name"
android:screenOrientation="landscape" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity
android:name=".CameraActivity"
android:label="@string/app_name"
android:screenOrientation="landscape">
</activity>
</application>
</manifest>

@ -1,55 +0,0 @@
/*
* Copyright 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xypower.mppreview;
import android.app.Activity;
import android.os.Bundle;
import android.os.Handler;
/**
 * Hosts {@link Camera2RawFragment}, which implements the RAW photo capture UI.
 */
public class CameraActivity extends Activity {
    Handler mHandler;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mHandler = new Handler();
        setContentView(R.layout.activity_camera);
        // Attach the fragment only on a fresh start; after a configuration
        // change the framework restores the previous fragment instance itself.
        if (savedInstanceState == null) {
            showCaptureFragment(false);
        }
    }

    /**
     * Tears down the current capture fragment and replaces it with a fresh one.
     *
     * @param hdr whether the new fragment should be created in HDR mode
     */
    public void reopenFragment(final boolean hdr) {
        // Posting via the handler defers the swap until the current event
        // (typically the click that requested it) has finished dispatching.
        mHandler.post(() -> showCaptureFragment(hdr));
    }

    /** Swaps a new {@link Camera2RawFragment} configured for {@code hdr} into the container. */
    private void showCaptureFragment(boolean hdr) {
        getFragmentManager().beginTransaction()
                .replace(R.id.container, Camera2RawFragment.newInstance(hdr))
                .commit();
    }
}

@ -1,54 +0,0 @@
package com.xypower.mppreview;
import androidx.appcompat.app.AppCompatActivity;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.Switch;
/**
 * Launcher screen: one button per camera channel plus an HDR switch.
 * Tapping a channel button opens {@link CameraActivity} for that channel.
 */
public class MainActivity extends Activity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // One listener per channel button; the repeated anonymous-class
        // boilerplate is factored into bindChannelButton().
        bindChannelButton(R.id.channel1, 1);
        bindChannelButton(R.id.channel2, 2);
        bindChannelButton(R.id.channel3, 3);
    }

    /**
     * Wires the button with {@code buttonId} to capture on {@code channel}.
     * {@code setOnClickListener} is declared on {@code View}, so no Button cast is needed.
     */
    private void bindChannelButton(int buttonId, final int channel) {
        findViewById(buttonId).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                takePhoto(channel);
            }
        });
    }

    /** Captures on {@code channel}, reading the HDR preference from the UI switch. */
    protected void takePhoto(int channel) {
        Switch hdrSwitch = (Switch) findViewById(R.id.hdr);
        takePhoto(channel, hdrSwitch.isChecked());
    }

    /**
     * Launches {@link CameraActivity} for the given channel.
     *
     * @param channel 1-based channel number as shown in the UI
     * @param hdr     whether HDR capture was requested
     */
    protected void takePhoto(int channel, boolean hdr) {
        int cameraId = channel - 1; // UI channels are 1-based; camera IDs are 0-based
        Intent intent = new Intent(MainActivity.this, CameraActivity.class);
        intent.putExtra("cameraId", cameraId);
        // NOTE(review): the HDR flag is only forwarded for channel 1 —
        // presumably only that camera supports HDR; confirm this is intended.
        if (channel == 1) {
            intent.putExtra("hdr", hdr);
        }
        startActivity(intent);
    }
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 665 B

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.1 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.3 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.2 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 6.9 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.0 KiB

@ -1,5 +0,0 @@
<vector android:height="48dp" android:tint="#0000FF"
android:viewportHeight="24" android:viewportWidth="24"
android:width="48dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="@android:color/white" android:pathData="M20,11H7.83l5.59,-5.59L12,4l-8,8 8,8 1.41,-1.41L7.83,13H20v-2z"/>
</vector>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.3 KiB

@ -1,6 +0,0 @@
<vector android:height="24dp" android:tint="#0000FF"
android:viewportHeight="24" android:viewportWidth="24"
android:width="24dp" xmlns:android="http://schemas.android.com/apk/res/android">
<path android:fillColor="@android:color/white" android:pathData="M12,12m-3.2,0a3.2,3.2 0,1 1,6.4 0a3.2,3.2 0,1 1,-6.4 0"/>
<path android:fillColor="@android:color/white" android:pathData="M9,2L7.17,4L4,4c-1.1,0 -2,0.9 -2,2v12c0,1.1 0.9,2 2,2h16c1.1,0 2,-0.9 2,-2L22,6c0,-1.1 -0.9,-2 -2,-2h-3.17L15,2L9,2zM12,17c-2.76,0 -5,-2.24 -5,-5s2.24,-5 5,-5 5,2.24 5,5 -2.24,5 -5,5z"/>
</vector>

Binary file not shown.

Before

Width:  |  Height:  |  Size: 196 B

@ -1,60 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2015 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:ignore="MissingDefaultResource">
<com.xypower.mppreview.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<ImageView
android:id="@+id/picture"
android:layout_width="48dp"
android:layout_height="48dp"
android:layout_gravity="center"
android:layout_marginBottom="8dp"
android:scaleType="fitXY"
android:alpha="0.5"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintEnd_toStartOf="@id/backMain"
app:layout_constraintHorizontal_bias="0.5"
app:srcCompat="@drawable/ic_take_photo"
/>
<ImageView
android:id="@+id/backMain"
android:layout_width="48dp"
android:layout_height="48dp"
android:scaleType="fitXY"
android:alpha="0.5"
app:layout_constraintStart_toEndOf="@id/picture"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
app:srcCompat="@drawable/ic_back"
/>
</androidx.constraintlayout.widget.ConstraintLayout>

@ -1,22 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2015 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#000"
tools:context="com.xypower.mppreview.CameraActivity" />

@ -1,59 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
<Button
android:id="@+id/channel1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="16dp"
android:layout_marginTop="16dp"
android:text="通道1"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<Button
android:id="@+id/channel2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="16dp"
android:layout_marginTop="16dp"
android:text="通道2"
app:layout_constraintStart_toEndOf="@+id/channel1"
app:layout_constraintTop_toTopOf="parent" />
<Button
android:id="@+id/channel3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="16dp"
android:layout_marginTop="16dp"
android:text="通道3"
app:layout_constraintStart_toEndOf="@+id/channel2"
app:layout_constraintTop_toTopOf="parent" />
<TextView
android:id="@+id/textView"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="16dp"
android:layout_marginTop="24dp"
android:text="HDR"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@id/channel1" />
<Switch
android:id="@+id/hdr"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
app:layout_constraintStart_toEndOf="@id/textView"
app:layout_constraintTop_toTopOf="@id/textView"
app:layout_constraintBottom_toBottomOf="@id/textView" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -1,24 +0,0 @@
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Semantic definitions -->
<dimen name="horizontal_page_margin">@dimen/margin_huge</dimen>
<dimen name="vertical_page_margin">@dimen/margin_medium</dimen>
</resources>

@ -1,25 +0,0 @@
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<style name="Widget.SampleMessage">
<item name="android:textAppearance">?android:textAppearanceLarge</item>
<item name="android:lineSpacingMultiplier">1.2</item>
<item name="android:shadowDy">-6.5</item>
</style>
</resources>

@ -1,22 +0,0 @@
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Holo.Light" />
</resources>

@ -1,21 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
</resources>

@ -1,24 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Material.Light">
</style>
</resources>

@ -1,22 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<string name="app_name">微拍预览</string>
<string name="intro_message">微拍预览
</string>
</resources>

@ -1,20 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2015 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<string name="picture">拍照</string>
<string name="description_info">Info</string>
<string name="request_permission">This app needs camera permission.</string>
</resources>

@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2015 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<style name="MaterialTheme" parent="android:Theme.Material.Light.NoActionBar.Fullscreen" />
</resources>

@ -1,32 +0,0 @@
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Define standard dimensions to comply with Holo-style grids and rhythm. -->
<dimen name="margin_tiny">4dp</dimen>
<dimen name="margin_small">8dp</dimen>
<dimen name="margin_medium">16dp</dimen>
<dimen name="margin_large">32dp</dimen>
<dimen name="margin_huge">64dp</dimen>
<!-- Semantic definitions -->
<dimen name="horizontal_page_margin">@dimen/margin_medium</dimen>
<dimen name="vertical_page_margin">@dimen/margin_medium</dimen>
</resources>

@ -1,42 +0,0 @@
<!--
Copyright 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<resources>
<!-- Activity themes -->
<style name="Theme.Base" parent="android:Theme.Light" />
<style name="Theme.Sample" parent="Theme.Base" />
<style name="AppTheme" parent="Theme.Sample" />
<!-- Widget styling -->
<style name="Widget" />
<style name="Widget.SampleMessage">
<item name="android:textAppearance">?android:textAppearanceMedium</item>
<item name="android:lineSpacingMultiplier">1.1</item>
</style>
<style name="Widget.SampleMessageTile">
<item name="android:background">@drawable/tile</item>
<item name="android:shadowColor">#7F000000</item>
<item name="android:shadowDy">-3.5</item>
<item name="android:shadowRadius">2</item>
</style>
</resources>

@ -1,40 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
Copyright (C) 2013 The Android Open Source Project
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<!-- package name must be unique so suffix with "tests" so package loader doesn't ignore us -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.xypower.mppreview.tests"
android:versionCode="1"
android:versionName="1.0">
<!-- Min/target SDK versions (<uses-sdk>) managed by build.gradle -->
<!-- We add an application tag here just so that we can indicate that
this package needs to link against the android.test library,
which is needed when building test cases. -->
<application>
<uses-library android:name="android.test.runner" />
</application>
<!--
Specifies the instrumentation test runner used to run the tests.
-->
<instrumentation
android:name="android.test.InstrumentationTestRunner"
android:targetPackage="com.xypower.mppreview"
android:label="Tests for com.xypower.mppreview" />
</manifest>

@ -1,57 +0,0 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xypower.mppreview.tests;
import com.xypower.mppreview.*;
import android.test.ActivityInstrumentationTestCase2;
/**
 * Instrumentation tests for the Camera2Raw sample; exercises {@link CameraActivity}.
 *
 * NOTE(review): ActivityInstrumentationTestCase2 is deprecated — consider
 * migrating to the AndroidX test rules / ActivityScenario APIs.
 */
public class SampleTests extends ActivityInstrumentationTestCase2<CameraActivity> {
// The activity under test; created in setUp() via getActivity().
private CameraActivity mTestActivity;
public SampleTests() {
super(CameraActivity.class);
}
@Override
protected void setUp() throws Exception {
super.setUp();
// Starts the activity under test using the default Intent with:
// action = {@link Intent#ACTION_MAIN}
// flags = {@link Intent#FLAG_ACTIVITY_NEW_TASK}
// All other fields are null or empty.
mTestActivity = getActivity();
}
/**
 * Test if the test fixture has been set up correctly.
 */
public void testPreconditions() {
// Add a message to give context to the assertion; it is shown when
// a test fails and makes the failure easy to understand.
assertNotNull("mTestActivity is null", mTestActivity);
}
/**
 * Add more tests below.
 */
}

@ -1,35 +0,0 @@
# How to become a contributor and submit your own code
## Contributor License Agreements
We'd love to accept your sample apps and patches! Before we can take them, we
have to jump a couple of legal hurdles.
Please fill out either the individual or corporate Contributor License Agreement (CLA).
* If you are an individual writing original source code and you're sure you
own the intellectual property, then you'll need to sign an [individual CLA]
(https://developers.google.com/open-source/cla/individual).
* If you work for a company that wants to allow you to contribute your work,
then you'll need to sign a [corporate CLA]
(https://developers.google.com/open-source/cla/corporate).
Follow either of the two links above to access the appropriate CLA and
instructions for how to sign and return it. Once we receive it, we'll be able to
accept your pull requests.
## Contributing A Patch
1. Submit an issue describing your proposed change to the repo in question.
1. The repo owner will respond to your issue promptly.
1. If your proposed change is accepted, and you haven't already done so, sign a
Contributor License Agreement (see details above).
1. Fork the desired repo, develop and test your code changes.
1. Ensure that your code adheres to the existing style in the sample to which
you are contributing. Refer to the
[Android Code Style Guide]
(https://source.android.com/source/code-style.html) for the
recommended coding standards for this organization.
1. Ensure that your code has an appropriate set of unit tests which all pass.
1. Submit a pull request.

@ -1,35 +0,0 @@
# How to become a contributor and submit your own code
## Contributor License Agreements
We'd love to accept your sample apps and patches! Before we can take them, we
have to jump a couple of legal hurdles.
Please fill out either the individual or corporate Contributor License Agreement (CLA).
* If you are an individual writing original source code and you're sure you
own the intellectual property, then you'll need to sign an [individual CLA]
(https://cla.developers.google.com).
* If you work for a company that wants to allow you to contribute your work,
then you'll need to sign a [corporate CLA]
(https://cla.developers.google.com).
Follow either of the two links above to access the appropriate CLA and
instructions for how to sign and return it. Once we receive it, we'll be able to
accept your pull requests.
## Contributing A Patch
1. Submit an issue describing your proposed change to the repo in question.
1. The repo owner will respond to your issue promptly.
1. If your proposed change is accepted, and you haven't already done so, sign a
Contributor License Agreement (see details above).
1. Fork the desired repo, develop and test your code changes.
1. Ensure that your code adheres to the existing style in the sample to which
you are contributing. Refer to the
[Android Code Style Guide]
(https://source.android.com/source/code-style.html) for the
recommended coding standards for this organization.
1. Ensure that your code has an appropriate set of unit tests which all pass.
1. Submit a pull request.

@ -1,203 +0,0 @@
Apache License
--------------
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

@ -1,2 +0,0 @@
# camera2Raw

1
app/.gitignore vendored

@ -0,0 +1 @@
/build

@ -0,0 +1,87 @@
plugins {
id 'com.android.application'
}
android {
compileSdk 33
defaultConfig {
applicationId "com.xypower.mppreview"
minSdk 28
targetSdk 30
versionCode 3
versionName "1.2"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
// cppFlags '-std=c++17 -frtti -fexceptions -Wno-error=format-security'
cppFlags '-std=c++17 -fexceptions -Wno-error=format-security -fopenmp '
// cppFlags '-std=c++17 -Wno-error=format-security'
// arguments "-DANDROID_STL=c++_shared"
// arguments "-DNCNN_DISABLE_EXCEPTION=OFF", "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni", "-DNCNN_ROOT=" + ncnnroot
arguments "-DOpenCV_DIR=" + opencvsdk + "/sdk/native/jni"
abiFilters 'arm64-v8a'
// setAbiFilters(['arm64-v8a'])
}
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
debug {
jniDebuggable true
}
}
// sourceSets {
// main{
// jniLibs.srcDirs=['src/main/jniLibs']
// }
// }
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
externalNativeBuild {
cmake {
path file('src/main/cpp/CMakeLists.txt')
version '3.22.1'
}
}
buildFeatures {
viewBinding true
}
}
dependencies {
implementation 'androidx.appcompat:appcompat:1.3.0'
implementation 'com.google.android.material:material:1.4.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'
//CameraX
// implementation "androidx.camera:camera-core:1.4.1"
// implementation "androidx.camera:camera-camera2:1.4.1"
// implementation "androidx.camera:camera-view:1.4.1"
// implementation "androidx.camera:camera-lifecycle:1.4.1"
// implementation "androidx.camera:camera-video:1.4.1"
// CameraX core library using camera2 implementation
implementation "androidx.camera:camera-camera2:1.1.0"
// CameraX Lifecycle Library
implementation "androidx.camera:camera-lifecycle:1.1.0"
// CameraX View class
implementation "androidx.camera:camera-view:1.1.0"
}

@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

@ -0,0 +1,26 @@
package com.xypower.mppreview;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    /**
     * Verifies the instrumentation runs against this app's context.
     *
     * <p>Bug fix: the expected package was "com.xypower.camera2raw", a leftover
     * from the sample this project was derived from; the manifest/applicationId
     * is "com.xypower.mppreview", so the test could never pass.
     */
    @Test
    public void useAppContext() {
        // Context of the app under test.
        Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
        assertEquals("com.xypower.mppreview", appContext.getPackageName());
    }
}

@ -0,0 +1,57 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
package="com.xypower.mppreview">
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<!-- A camera with RAW capability is required to use this application -->
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.raw" />
<application
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.MpPreview"
android:requestLegacyExternalStorage="true"
tools:targetApi="30">
<activity
android:name=".ui.CameraChannelActivity"
android:exported="false" />
<activity
android:name=".ui.CameraActivity"
android:exported="false" />
<activity
android:name=".MainActivity"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<provider
android:name="androidx.core.content.FileProvider"
android:authorities="com.stg.rouge.activity.fileprovider"
android:exported="false"
android:grantUriPermissions="true"
tools:replace="android:authorities">
<meta-data
android:name="android.support.FILE_PROVIDER_PATHS"
android:resource="@xml/file_paths_public"
tools:replace="android:resource" />
</provider>
</application>
</manifest>

@ -0,0 +1,124 @@
#include "BmpLoader.h"
#include <algorithm>
#include <cmath>
// Reads and validates the BMP file + info headers and derives the row
// layout (rowSize / rowPadding) needed to address pixel data later.
//
// Throws std::runtime_error if the file cannot be opened, either header
// cannot be read in full, the 'BM' signature is missing, or the bit depth
// is not a whole number of bytes per pixel.
BmpInfo BmpLoader::readBmpInfo(const std::string& filePath) {
    std::ifstream file(filePath, std::ios::binary);
    if (!file.is_open()) {
        throw std::runtime_error("Failed to open BMP file: " + filePath);
    }

    BmpHeader header;
    file.read(reinterpret_cast<char*>(&header), sizeof(header));
    if (!file) { // Bug fix: short/failed reads previously went unnoticed.
        throw std::runtime_error("Failed to read BMP header: " + filePath);
    }
    if (header.signature != 0x4D42) { // 'BM' (little-endian)
        throw std::runtime_error("Invalid BMP signature in file: " + filePath);
    }

    BmpInfoHeader infoHeader;
    file.read(reinterpret_cast<char*>(&infoHeader), sizeof(infoHeader));
    if (!file) {
        throw std::runtime_error("Failed to read BMP info header: " + filePath);
    }
    // Bug fix: palette depths (1/4 bpp) would truncate bytesPerPixel to 0
    // below and produce a bogus layout; reject them explicitly.
    if (infoHeader.bitsPerPixel < 8 || infoHeader.bitsPerPixel % 8 != 0) {
        throw std::runtime_error("Unsupported BMP bit depth in file: " + filePath);
    }

    BmpInfo info;
    info.width = infoHeader.width;
    info.height = std::abs(infoHeader.height); // Height is negative for top-down BMPs
    info.bitsPerPixel = infoHeader.bitsPerPixel;
    info.dataOffset = header.dataOffset;

    // Rows are padded to a 4-byte boundary.
    int bytesPerPixel = info.bitsPerPixel / 8;
    info.rowSize = info.width * bytesPerPixel;
    info.rowPadding = (4 - (info.rowSize % 4)) % 4;

    return info; // ifstream closes on destruction
}
// Reads a rectangular pixel region from a BMP file using the layout in
// `info` (as produced by readBmpInfo). The region is clamped to the image
// bounds; an empty vector is returned when nothing remains after clamping.
// Returned bytes are the raw file pixels (BGR/BGRA/gray channel order, no
// conversion), packed row-major with rows ordered top-to-bottom.
std::vector<uint8_t> BmpLoader::readBmpRegion(
const std::string& filePath,
const BmpInfo& info,
int32_t startX, int32_t startY,
int32_t width, int32_t height) {
std::ifstream file(filePath, std::ios::binary);
if (!file.is_open()) {
throw std::runtime_error("Failed to open BMP file: " + filePath);
}
// Clamp the requested region to the image dimensions.
startX = std::max(0, startX);
startY = std::max(0, startY);
width = std::min(width, info.width - startX);
height = std::min(height, info.height - startY);
if (width <= 0 || height <= 0) {
return std::vector<uint8_t>();
}
// Bytes per pixel and per output row (no padding in the output).
int bytesPerPixel = info.bitsPerPixel / 8;
int regionRowSize = width * bytesPerPixel;
// Allocate memory for the region
std::vector<uint8_t> regionData(width * height * bytesPerPixel);
// Read the region one row at a time, seeking to each source row.
for (int y = 0; y < height; y++) {
// NOTE(review): the row index is always flipped, i.e. this assumes a
// bottom-up BMP. readBmpInfo stores abs(height), so a top-down file
// (negative stored height) would come out vertically mirrored — confirm
// expected inputs.
int sourceY = (info.height - 1 - (startY + y));
long offset = info.dataOffset + (sourceY * (info.rowSize + info.rowPadding)) + startX * bytesPerPixel;
file.seekg(offset, std::ios::beg);
// NOTE(review): read success is not checked; a truncated file leaves
// this row partially filled rather than failing loudly.
file.read(reinterpret_cast<char*>(regionData.data() + y * regionRowSize), regionRowSize);
}
file.close();
return regionData;
}
// Convert sRGB to linear
// Converts one sRGB-encoded channel value (expected in [0, 1]) to linear
// light using the standard piecewise sRGB transfer function.
float srgbToLinear(float srgb) {
    const float kLinearCutoff = 0.04045f;
    return (srgb <= kLinearCutoff)
               ? srgb / 12.92f
               : pow((srgb + 0.055f) / 1.055f, 2.4f);
}
// Reads a region and expands it to packed RGB floats in [0, 1], row-major.
// BMP stores 24/32-bit pixels in BGR(A) order, so channels are swapped to
// RGB; 8-bit grayscale is replicated across all three channels. No
// sRGB -> linear conversion is applied here.
//
// NOTE(review): if readBmpRegion clamps the requested region, `width` and
// `height` here still reflect the REQUESTED size, so indexing can run past
// the returned data — callers should pass in-bounds regions. TODO confirm.
std::vector<float> BmpLoader::readBmpRegionAsFloat(
    const std::string& filePath,
    const BmpInfo& info,
    int32_t startX, int32_t startY,
    int32_t width, int32_t height) {
    auto data = readBmpRegion(filePath, info, startX, startY, width, height);
    if (data.empty()) {
        return std::vector<float>();
    }

    int bytesPerPixel = info.bitsPerPixel / 8;
    // Value-initialized: pixels of any unsupported depth stay black.
    std::vector<float> floatData(width * height * 3);

    for (int i = 0; i < width * height; i++) {
        if (bytesPerPixel == 3 || bytesPerPixel == 4) { // BGR or BGRA
            floatData[i * 3 + 0] = data[i * bytesPerPixel + 2] / 255.0f; // R
            floatData[i * 3 + 1] = data[i * bytesPerPixel + 1] / 255.0f; // G
            floatData[i * 3 + 2] = data[i * bytesPerPixel + 0] / 255.0f; // B
        } else if (bytesPerPixel == 1) { // Grayscale -> replicate to RGB
            float v = data[i] / 255.0f;
            floatData[i * 3 + 0] = v;
            floatData[i * 3 + 1] = v;
            floatData[i * 3 + 2] = v;
        }
    }
    return floatData;
}

@ -0,0 +1,54 @@
#pragma once
#include <vector>
#include <string>
#include <cstdint>
#include <fstream>
#include <stdexcept>
// Headers must match the on-disk byte layout exactly — disable padding.
#pragma pack(push, 1)
// 14-byte BMP file header (BITMAPFILEHEADER).
struct BmpHeader {
uint16_t signature; // 'BM' (0x4D42 little-endian)
uint32_t fileSize; // Size of the BMP file
uint16_t reserved1; // Reserved
uint16_t reserved2; // Reserved
uint32_t dataOffset; // Offset to the start of image data
};
// 40-byte BMP info header (BITMAPINFOHEADER).
struct BmpInfoHeader {
uint32_t headerSize; // Size of the info header
int32_t width; // Width of the image
int32_t height; // Height of the image (negative means top-down rows)
uint16_t planes; // Number of color planes
uint16_t bitsPerPixel; // Bits per pixel
uint32_t compression; // Compression type
uint32_t imageSize; // Image size in bytes
int32_t xPixelsPerMeter; // X resolution
int32_t yPixelsPerMeter; // Y resolution
uint32_t colorsUsed; // Number of colors used
uint32_t colorsImportant;// Number of important colors
};
#pragma pack(pop)
// Parsed, ready-to-use layout of one BMP file (see readBmpInfo).
struct BmpInfo {
int32_t width;
int32_t height; // Absolute value of the header height
int32_t bitsPerPixel;
uint32_t dataOffset; // File offset of the pixel data
int32_t rowPadding; // Bytes of padding per row (rows align to 4 bytes)
int32_t rowSize; // Unpadded row size in bytes (width * bytes/pixel)
};
// Stateless helpers for reading whole or partial BMP images from disk.
// All methods throw std::runtime_error on open/parse failures.
class BmpLoader {
public:
// Parses the headers of `filePath` and returns its layout.
static BmpInfo readBmpInfo(const std::string& filePath);
// Reads a (clamped) rectangular region as raw file bytes (BGR order).
static std::vector<uint8_t> readBmpRegion(
const std::string& filePath,
const BmpInfo& info,
int32_t startX, int32_t startY,
int32_t width, int32_t height);
// Reads a region and converts it to packed RGB floats in [0, 1].
static std::vector<float> readBmpRegionAsFloat(
const std::string& filePath,
const BmpInfo& info,
int32_t startX, int32_t startY,
int32_t width, int32_t height);
};

@ -0,0 +1,190 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.22.1)
# enable_language(ASM)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fopenmp -static-openmp -ffunction-sections -fdata-sections -Wformat-security ")
set(CMAKE_CXX_FLAGS "${CMAKE_C_FLAGS}")
# SET_TARGET_PROPERTIES(microphoto PROPERTIES LINK_FLAGS "-Wl,-s,--gc-sections")
IF (CMAKE_BUILD_TYPE STREQUAL Debug)
ADD_DEFINITIONS(-D_DEBUG)
ELSE()
ADD_DEFINITIONS(-DNDEBUG)
ENDIF()
project("mppreview")
message(WARNING "ANDROID_NDK ${ANDROID_NDK}")
# Find glslc compiler from Vulkan SDK - use explicit path
if(ANDROID)
# For Android Studio/NDK builds
if(DEFINED ENV{ANDROID_NDK})
set(GLSLC_EXECUTABLE "$ENV{ANDROID_NDK}/shader-tools/${ANDROID_ABI}/glslc")
else()
set(GLSLC_EXECUTABLE "${ANDROID_NDK}/shader-tools/${ANDROID_ABI}/glslc")
endif()
else()
# For Windows builds
if(DEFINED ENV{VULKAN_SDK})
set(GLSLC_EXECUTABLE "$ENV{VULKAN_SDK}/Bin/glslc.exe")
else()
# Common installation paths
find_program(GLSLC_EXECUTABLE
NAMES glslc
PATHS
"C:/VulkanSDK/*/Bin"
"D:/VulkanSDK/*/Bin"
"$ENV{PROGRAMFILES}/VulkanSDK/*/Bin"
)
endif()
endif()
# NOTE(review): this unconditional set() overrides ALL of the detection
# above and hardcodes the Windows host tool directory (NDK shader-tools
# live under host-named dirs such as windows-x86_64, not per-ABI dirs).
# Builds on Linux/macOS hosts will get a nonexistent path — confirm intent
# and either remove this line or key the directory off the host system.
set(GLSLC_EXECUTABLE ${ANDROID_NDK}/shader-tools/windows-x86_64/glslc)
if(NOT GLSLC_EXECUTABLE)
message(WARNING "Could not find glslc executable. Shader compilation will be skipped.")
endif()
# Find Vulkan
find_package(Vulkan REQUIRED)
# Remove existing shaders directory to ensure clean copy
file(REMOVE_RECURSE ${CMAKE_CURRENT_BINARY_DIR}/shaders)
# Create directory for shader files
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/shaders)
# Copy each shader file individually
file(GLOB SHADER_FILES "${CMAKE_CURRENT_SOURCE_DIR}/shaders/*.comp" "${CMAKE_CURRENT_SOURCE_DIR}/shaders/*.vert" "${CMAKE_CURRENT_SOURCE_DIR}/shaders/*.frag" "${CMAKE_CURRENT_SOURCE_DIR}/shaders/*.spv")
foreach(SHADER_FILE ${SHADER_FILES})
get_filename_component(SHADER_FILENAME ${SHADER_FILE} NAME)
configure_file(${SHADER_FILE} ${CMAKE_CURRENT_BINARY_DIR}/shaders/${SHADER_FILENAME} COPYONLY)
endforeach()
include_directories(D:/Workspace/deps/hdrplus_libs/${ANDROID_ABI}/include)
link_directories(D:/Workspace/deps/hdrplus_libs/${ANDROID_ABI}/lib)
find_package(OpenCV REQUIRED core imgproc highgui photo)
# find_package(OpenCV REQUIRED core imgproc)
if(OpenCV_FOUND)
include_directories(${OpenCV_INCLUDE_DIRS})
message(WARNING "OpenCV library status:")
message(WARNING " version: ${OpenCV_VERSION}")
message(WARNING " libraries: ${OpenCV_LIBS}")
message(WARNING " include path: ${OpenCV_INCLUDE_DIRS}")
else(OpenCV_FOUND)
message(FATAL_ERROR "OpenCV library not found")
endif(OpenCV_FOUND)
# libzip
# set(libzip_DIR ${LIBZIP_ROOT}/${ANDROID_ABI}/lib/cmake/libzip)
# find_package(libzip REQUIRED)
# ncnn
# set(ncnn_DIR ${NCNN_ROOT}/${ANDROID_ABI}/lib/cmake/ncnn)
# find_package(ncnn REQUIRED)
# OpenMP
find_package(OpenMP REQUIRED)
add_library( # Sets the name of the library.
${PROJECT_NAME}
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
MpPreview.cpp
HdrImpl.cpp
BmpLoader.cpp
vulkan_hdr_generator.cpp
HdrWriter.cpp
)
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
# Find glslc compiler from Vulkan SDK
find_program(GLSLC_EXECUTABLE glslc REQUIRED)
# First compile the shader
add_custom_command(
OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/shaders/hdr_merge.comp.spv
COMMAND ${GLSLC_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/shaders/hdr_merge.comp -o ${CMAKE_CURRENT_BINARY_DIR}/shaders/hdr_merge.comp.spv
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/shaders/hdr_merge.comp
COMMENT "Compiling compute shader hdr_merge.comp"
)
# CREATE THE MISSING TARGET - Add this line
add_custom_target(shaders DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/shaders/hdr_merge.comp.spv)
# Now you can use the target in a post-build command
add_custom_command(
TARGET shaders POST_BUILD
COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_SOURCE_DIR}/../assets/shaders
COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/shaders/hdr_merge.comp.spv ${CMAKE_CURRENT_SOURCE_DIR}/../assets/shaders/
COMMENT "Copying shader to assets directory"
)
# Make sure your main library depends on the shaders target - Add this line
add_dependencies(${PROJECT_NAME} shaders)
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log)
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
# Link with Vulkan
target_include_directories(${PROJECT_NAME} PRIVATE ${Vulkan_INCLUDE_DIRS})
target_link_libraries(${PROJECT_NAME} PRIVATE ${Vulkan_LIBRARIES})
target_link_libraries( # Specifies the target library.
${PROJECT_NAME}
PUBLIC -fopenmp -static-openmp
# Links the target library to the log library
# included in the NDK.
${log-lib}
android mediandk z jnigraphics
#ncnn
raw raw_r
${OpenCV_LIBS}
)
add_executable( libhdr.so
hdr.cpp
HdrImpl.cpp)
target_link_libraries( libhdr.so PUBLIC -fopenmp -static-openmp
android z
-fopenmp -static-openmp
${OpenCV_LIBS}
raw raw_r
)
# set_target_properties(${PROJECT_NAME} PROPERTIES LINK_FLAGS_RELEASE "-strip-all")

@ -0,0 +1,67 @@
#include <jni.h>
#include <string>
#include <vector>
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "hdr.h"
// Merges a bracketed exposure stack into a single tone-mapped 8-bit image
// using Debevec CRF calibration + merge and Reinhard tone mapping.
//
// times:  exposure time for each entry in `images` (same order).
// images: input exposures; cleared on return to release memory early.
// rgb:    output tone-mapped image, converted to CV_8U.
// Returns true on success.
bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& rgb)
{
    // Image alignment (AlignMTB) is created but currently disabled.
    cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB();
#if 0
    alignMTB->process(images, images);
#endif

    // Recover the Camera Response Function (CRF) from the exposure stack.
    cv::Mat responseDebevec;
    cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
    calibrateDebevec->process(images, responseDebevec, times);

    // Merge the exposures into one linear HDR image.
    cv::Mat hdrDebevec;
    cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
    mergeDebevec->process(images, hdrDebevec, times, responseDebevec);

    // Drop the inputs as early as possible to reduce peak memory.
    {
        std::vector<cv::Mat> empty;
        empty.swap(images);
    }

    // Tone-map with Reinhard's operator to obtain a displayable image.
    cv::Mat ldrReinhard;
    cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
    tonemapReinhard->process(hdrDebevec, ldrReinhard);
    hdrDebevec.release();

    // Scale the float result to [0, 255] and convert to 8-bit output.
    ldrReinhard = ldrReinhard * 255;
    ldrReinhard.convertTo(rgb, CV_8U);
    ldrReinhard.release();

    // Explicitly drop all OpenCV handles. (Fixed: mergeDebevec was
    // released twice and an unused local `int type` was removed.)
    mergeDebevec.release();
    responseDebevec.release();
    alignMTB.release();
    tonemapReinhard.release();
    calibrateDebevec.release();
    return true;
}

@ -0,0 +1,170 @@
//
// Created by Matthew on 2025/3/22.
//
#include "HdrWriter.h"
#include <cmath>
#include <algorithm>
#include <vector>
// Define STB_IMAGE_WRITE_IMPLEMENTATION in exactly one CPP file
#define STB_IMAGE_WRITE_IMPLEMENTATION
#include "stb_image_write.h" // Download this header from https://github.com/nothings/stb
// Reinhard global tone-mapping operator: compresses [0, inf) into [0, 1).
float toneMap(float value) {
    const float denominator = 1.0f + value;
    return value / denominator;
}
// Encodes a linear-light value for display with a simple 2.2 power-law
// gamma (value^(1/2.2)).
float applyGammaCorrection(float value) {
    const float kInverseGamma = 1.0f / 2.2f;
    return pow(value, kInverseGamma);
}
// Writes `data` (packed RGB floats, row-major, top-to-bottom) to `filename`
// as an uncompressed Radiance .hdr file using 4-byte shared-exponent RGBE
// pixels. Returns false if the file cannot be opened or any write fails.
bool HdrWriter::writeRGBE(const std::string& filename,
                          const std::vector<float>& data,
                          int width, int height) {
    std::ofstream file(filename, std::ios::binary);
    if (!file.is_open()) {
        return false;
    }

    writeHeader(file, width, height);

    // Encode and emit one RGBE quad per pixel.
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int pixelIndex = (y * width + x) * 3;
            float r = data[pixelIndex];
            float g = data[pixelIndex + 1];
            float b = data[pixelIndex + 2];

            uint8_t rgbe[4];
            rgbeFromFloat(r, g, b, rgbe);
            file.write(reinterpret_cast<char*>(rgbe), 4);
        }
    }
    // Bug fix: previously returned true unconditionally; report stream
    // errors the same way writeBMP does.
    return file.good();
}
// Emits the minimal Radiance (.hdr) header: magic line, pixel format,
// blank separator, then the resolution line ("-Y h +X w" = rows stored
// top-to-bottom, columns left-to-right).
void HdrWriter::writeHeader(std::ofstream& file, int width, int height) {
    file << "#?RADIANCE\n"
         << "FORMAT=32-bit_rle_rgbe\n\n"
         << "-Y " << height << " +X " << width << "\n";
}
// Packs linear RGB into Radiance's 4-byte shared-exponent RGBE format.
// The exponent comes from the largest channel; each channel's mantissa is
// scaled into [0, 255] relative to that exponent. Near-zero pixels encode
// as (0, 0, 0, 0).
void HdrWriter::rgbeFromFloat(float r, float g, float b, uint8_t rgbe[4]) {
float v = std::max(r, std::max(g, b));
if (v < 1e-32f) {
rgbe[0] = rgbe[1] = rgbe[2] = rgbe[3] = 0;
} else {
int e;
// frexpf gives v = m * 2^e with m in [0.5, 1); m * 256 / v is the
// scale that lands the largest channel in [128, 256).
v = frexpf(v, &e) * 256.0f / v;
rgbe[0] = static_cast<uint8_t>(r * v);
rgbe[1] = static_cast<uint8_t>(g * v);
rgbe[2] = static_cast<uint8_t>(b * v);
// Radiance stores the exponent biased by +128.
rgbe[3] = static_cast<uint8_t>(e + 128);
}
}
// Dispatches to the writer for the requested container format. Only BMP
// is implemented today; PNG/JPEG (intended to go through
// stb_image_write.h) report failure.
bool HdrWriter::writeRGB(const std::string& filename,
                         const std::vector<float>& data,
                         int width, int height,
                         Format format) {
    switch (format) {
        case Format::BMP:
            return writeBMP(filename, data, width, height);
        default:
            // PNG/JPEG not implemented yet.
            return false;
    }
}
// Writes `data` (packed RGB floats, row-major, top-to-bottom) as a 24-bit
// uncompressed BMP. Each pixel is exposure-scaled, Reinhard tone-mapped,
// gamma-encoded, clamped, and emitted in BGR order. Returns file.good().
bool HdrWriter::writeBMP(const std::string& filename,
const std::vector<float>& data,
int width, int height) {
std::ofstream file(filename, std::ios::binary);
if (!file.is_open()) {
return false;
}
// BMP requires rows to be padded to multiples of 4 bytes
int paddingSize = (4 - (width * 3) % 4) % 4;
int rowSize = width * 3 + paddingSize;
int dataSize = rowSize * height;
int fileSize = 54 + dataSize; // 54 bytes for header + pixel data
// BMP File Header (14 bytes), fields little-endian byte by byte.
uint8_t fileHeader[14] = {
'B', 'M', // Signature
(uint8_t)(fileSize), (uint8_t)(fileSize >> 8), // File size in bytes
(uint8_t)(fileSize >> 16), (uint8_t)(fileSize >> 24),
0, 0, 0, 0, // Reserved
54, 0, 0, 0 // Offset to pixel data
};
// BMP Info Header (40 bytes)
uint8_t infoHeader[40] = {
40, 0, 0, 0, // Info header size
(uint8_t)(width), (uint8_t)(width >> 8), // Width
(uint8_t)(width >> 16), (uint8_t)(width >> 24),
(uint8_t)(height), (uint8_t)(height >> 8), // Height (positive = bottom-up; rows are written accordingly below)
(uint8_t)(height >> 16), (uint8_t)(height >> 24),
1, 0, // Number of color planes
24, 0, // Bits per pixel (24 for RGB)
0, 0, 0, 0, // No compression
0, 0, 0, 0, // Image size (can be 0 for no compression)
0, 0, 0, 0, // X pixels per meter
0, 0, 0, 0, // Y pixels per meter
0, 0, 0, 0, // Total colors (default)
0, 0, 0, 0 // Important colors (default)
};
file.write(reinterpret_cast<char*>(fileHeader), 14);
file.write(reinterpret_cast<char*>(infoHeader), 40);
// Padding bytes (zeros)
std::vector<uint8_t> padding(paddingSize, 0);
// Write pixel data (BGR order, bottom-to-top for standard BMP)
for (int y = height - 1; y >= 0; y--) { // BMP stores rows bottom-to-top
for (int x = 0; x < width; x++) {
int pixelIndex = (y * width + x) * 3;
// Tone mapping without exposure scaling, kept for reference.
#if 0
float r = toneMap(data[pixelIndex + 0]);
float g = toneMap(data[pixelIndex + 1]);
float b = toneMap(data[pixelIndex + 2]);
#endif
float exposure = 1.5f; // Brightness boost applied before tone mapping; adjust as needed
float r = toneMap(data[pixelIndex + 0] * exposure);
float g = toneMap(data[pixelIndex + 1] * exposure);
float b = toneMap(data[pixelIndex + 2] * exposure);
// Gamma-encode for display (value^(1/2.2)).
r = applyGammaCorrection(r);
g = applyGammaCorrection(g);
b = applyGammaCorrection(b);
// Clamp to [0, 1], scale to a byte, and swap to BGR file order.
uint8_t pixelData[3] = {
static_cast<uint8_t>(std::min(1.0f, b) * 255.0f), // B
static_cast<uint8_t>(std::min(1.0f, g) * 255.0f), // G
static_cast<uint8_t>(std::min(1.0f, r) * 255.0f) // R
};
file.write(reinterpret_cast<char*>(pixelData), 3);
}
// Write padding bytes
if (paddingSize > 0) {
file.write(reinterpret_cast<char*>(padding.data()), paddingSize);
}
}
return file.good();
}

@ -0,0 +1,40 @@
//
// Created by Matthew on 2025/3/22.
//
#ifndef MPPREVIEW_HDRWRITER_H
#define MPPREVIEW_HDRWRITER_H
#include <vector>
#include <string>
#include <cstdint>
#include <fstream>
// Writes HDR float-image buffers to disk.
// writeRGBE emits Radiance RGBE (.hdr) files; writeRGB emits tone-mapped
// 8-bit files (currently only BMP is implemented in the .cpp).
class HdrWriter {
public:
    // Writes `data` (row-major RGB floats, 3 per pixel) as an RGBE file.
    static bool writeRGBE(const std::string& filename,
                          const std::vector<float>& data,
                          int width, int height);
    // New RGB file format support
    enum class Format {
        PNG,
        JPEG,
        BMP
    };
    // Writes `data` tone mapped to an 8-bit file of the given format.
    // NOTE(review): the default is Format::PNG but only BMP is implemented,
    // so default-format calls currently return false - confirm intended.
    static bool writeRGB(const std::string& filename,
                         const std::vector<float>& data,
                         int width, int height,
                         Format format = Format::PNG);
private:
    // Writes the RGBE file header for writeRGBE.
    static void writeHeader(std::ofstream& file, int width, int height);
    // Packs one linear RGB triple into shared-exponent RGBE bytes.
    static void rgbeFromFloat(float r, float g, float b, uint8_t rgbe[4]);
    // 24-bit BMP backend used by writeRGB(Format::BMP).
    static bool writeBMP(const std::string& filename,
                        const std::vector<float>& data,
                        int width, int height);
};
#endif //MPPREVIEW_HDRWRITER_H

@ -0,0 +1,569 @@
#include <jni.h>
#include <string>
#include <vector>
#include <cerrno>
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
// #include "ncnn/yolov5ncnn.h"
#include <android/imagedecoder.h>
#include <android/log.h>
#include <media/NdkImage.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "hdr.h"
#include "BmpLoader.h"
#include "vulkan_hdr_generator.h"
namespace cv2
{
using namespace cv;
// Builds a LDR_SIZE x 1 identity (linear) camera response curve: entry i
// holds the value i in every channel.
// NOTE(review): the write below always goes through Vec3f, which looks like
// it assumes channels == 3 - confirm before using other channel counts.
Mat linearResponseNew(int channels)
{
    Mat response = Mat(LDR_SIZE, 1, CV_MAKETYPE(CV_32F, channels));
    for(int i = 0; i < LDR_SIZE; i++) {
        response.at<Vec3f>(i) = Vec3f::all(static_cast<float>(i));
    }
    return response;
}
// Triangle ("hat") weighting function over the 8-bit LDR range: weights rise
// linearly from 1 at intensity 0 up to the mid-range, then fall back
// symmetrically, so well-exposed mid-tones dominate the merge.
Mat triangleWeightsNew()
{
    Mat w(LDR_SIZE, 1, CV_32F);
    const int mid = LDR_SIZE / 2;
    for (int v = 0; v < LDR_SIZE; v++) {
        float weight;
        if (v < mid) {
            weight = static_cast<float>(v) + 1.0f;
        } else {
            weight = static_cast<float>(LDR_SIZE - v);
        }
        w.at<float>(v) = weight;
    }
    return w;
}
// Abstract interface for exposure-merging algorithms (local clone of
// cv::MergeExposures so the implementation below can be customized).
class CV_EXPORTS_W MergeExposuresNew
{
public:
    virtual ~MergeExposuresNew() {}
    /** @brief Merges images.
    @param src vector of input images
    @param dst result image
    @param times vector of exposure time values for each image
    @param response 256x1 matrix with inverse camera response function for each pixel value, it should
    have the same number of channels as images.
    */
    CV_WRAP virtual void process(InputArrayOfArrays src, OutputArray dst,
                                 InputArray times, InputArray response) = 0;
};
// Debevec merge interface: adds a convenience overload that merges without an
// explicit camera response (a linear response is assumed).
class CV_EXPORTS_W MergeDebevecNew : public MergeExposuresNew
{
public:
    CV_WRAP virtual void process(InputArrayOfArrays src, OutputArray dst,
                                 InputArray times, InputArray response) CV_OVERRIDE = 0;
    // Merge using a synthesized linear response.
    CV_WRAP virtual void process(InputArrayOfArrays src, OutputArray dst, InputArray times) = 0;
};
// Debevec-style exposure fusion (local clone of cv::MergeDebevecImpl with
// extra logging). Combines differently-exposed LDR frames into one HDR image
// in the log-radiance domain, weighting mid-tone pixels most heavily.
class MergeDebevecImplNew CV_FINAL : public MergeDebevecNew
{
public:
    MergeDebevecImplNew() :
        name("MergeDebevecNew"),
        weights(triangleWeightsNew())
    {
    }
    // Merges `src` (LDR frames) into `dst` (32F HDR) using exposure times
    // `_times` and inverse camera response `input_response` (a linear
    // response is synthesized when it is empty).
    void process(InputArrayOfArrays src, OutputArray dst, InputArray _times, InputArray input_response) CV_OVERRIDE
    {
        // CV_INSTRUMENT_REGION();
        ALOGD("HDR Merge 1");
        std::vector<Mat> images;
        src.getMatVector(images);
        Mat times = _times.getMat();
#if 0
        CV_Assert(images.size() == times.total());
        checkImageDimensions(images);
        CV_Assert(images[0].depth() == CV_8U);
#endif
        int channels = images[0].channels();
        Size size = images[0].size();
        int CV_32FCC = CV_MAKETYPE(CV_32F, channels);
        ALOGD("HDR Merge 2");
        dst.create(images[0].size(), CV_32FCC);
        Mat result = dst.getMat();
        Mat response = input_response.getMat();
        if(response.empty()) {
            // No calibrated CRF supplied - assume a linear sensor response.
            response = linearResponseNew(channels);
            response.at<Vec3f>(0) = response.at<Vec3f>(1); // avoid log(0) at bin 0
        }
        ALOGD("HDR Merge 3");
        Mat log_response;
        log(response, log_response);
        CV_Assert(log_response.rows == LDR_SIZE && log_response.cols == 1 &&
                  log_response.channels() == channels);
        // Work with log exposure times.
        Mat exp_values(times.clone());
        log(exp_values, exp_values);
        ALOGD("HDR Merge 4");
        result = Mat::zeros(size, CV_32FCC);
        std::vector<Mat> result_split;
        split(result, result_split);
        Mat weight_sum = Mat::zeros(size, CV_32F);
        ALOGD("HDR Merge 5");
        for(size_t i = 0; i < images.size(); i++) {
            // Per-pixel weight = mean of the per-channel triangle weights.
            std::vector<Mat> splitted;
            split(images[i], splitted);
            ALOGD("HDR Merge 5 - 1");
            Mat w = Mat::zeros(size, CV_32F);
            for(int c = 0; c < channels; c++) {
                LUT(splitted[c], weights, splitted[c]);
                w += splitted[c];
            }
            w /= channels;
            ALOGD("HDR Merge 5 - 2");
            // Map pixel values through the log response curve.
            Mat response_img;
            LUT(images[i], log_response, response_img);
            split(response_img, splitted);
            // #pragma omp parallel for num_threads(channels)
            for(int c = 0; c < channels; c++) {
                // Accumulate weighted log radiance: log E = log f(Z) - log t.
                // (Translated note: "crashes here" - the parallel pragma
                // above was disabled for this reason; kept serial.)
                result_split[c] += w.mul(splitted[c] - exp_values.at<float>((int)i));
            }
            weight_sum += w;
            ALOGD("HDR Merge 5 - 3");
        }
        ALOGD("HDR Merge 6");
        // Normalize by total weight, then leave the log domain.
        weight_sum = 1.0f / weight_sum;
        for(int c = 0; c < channels; c++) {
            result_split[c] = result_split[c].mul(weight_sum);
        }
        ALOGD("HDR Merge 7");
        merge(result_split, result);
        exp(result, result);
        ALOGD("HDR Merge 8");
    }
    // Convenience overload: merge assuming a linear camera response.
    void process(InputArrayOfArrays src, OutputArray dst, InputArray times) CV_OVERRIDE
    {
        // CV_INSTRUMENT_REGION();
        process(src, dst, times, Mat());
    }
protected:
    String name;   // algorithm name (mirrors the OpenCV convention)
    Mat weights;   // LDR_SIZE x 1 triangle weight LUT
};
// Factory for the local Debevec merge implementation.
Ptr<MergeDebevecNew> createMergeDebevecNew()
{
    return makePtr<MergeDebevecImplNew>();
}
// Abstract interface mirroring cv::TonemapReinhard: the tone-mapping entry
// point plus getters/setters for the Reinhard operator parameters.
class CV_EXPORTS_W TonemapReinhardNew
{
public:
    // Tone maps HDR `src` into LDR `dst`.
    CV_WRAP virtual void process(InputArray src, OutputArray dst) = 0;
    virtual ~TonemapReinhardNew() {}
    CV_WRAP virtual float getGamma() const = 0;
    CV_WRAP virtual void setGamma(float gamma) = 0;
    CV_WRAP virtual float getIntensity() const = 0;
    CV_WRAP virtual void setIntensity(float intensity) = 0;
    CV_WRAP virtual float getLightAdaptation() const = 0;
    CV_WRAP virtual void setLightAdaptation(float light_adapt) = 0;
    CV_WRAP virtual float getColorAdaptation() const = 0;
    CV_WRAP virtual void setColorAdaptation(float color_adapt) = 0;
};
// Element-wise natural log with the input floored at 1e-4 to avoid log(0).
inline void log_(const Mat& src, Mat& dst)
{
    max(src, Scalar::all(1e-4), dst);
    log(dst, dst);
}
// Reinhard global tone mapping (local clone of cv::TonemapReinhardImpl with
// logging and an OpenMP-parallel per-channel loop).
class TonemapReinhardImpl CV_FINAL : public TonemapReinhardNew
{
public:
    TonemapReinhardImpl(float _gamma, float _intensity, float _light_adapt, float _color_adapt) :
        name("TonemapReinhardNew"),
        gamma(_gamma),
        intensity(_intensity),
        light_adapt(_light_adapt),
        color_adapt(_color_adapt)
    {
    }
    // Tone maps the HDR image `_src` into `_dst` (CV_32FC3).
    // NOTE(review): this method overwrites the `intensity` member below, so a
    // second process() call on the same object uses a different effective
    // intensity - confirm whether instances are meant to be single-use.
    void process(InputArray _src, OutputArray _dst) CV_OVERRIDE
    {
        ALOGD("HDR 1 ");
        Mat src = _src.getMat();
        CV_Assert(!src.empty());
        _dst.create(src.size(), CV_32FC3);
        Mat img = _dst.getMat();
        // First a linear (gamma = 1) normalization pass.
        Ptr<Tonemap> linear = createTonemap(1.0f);
        linear->process(src, img);
        ALOGD("HDR 2 ");
        Mat gray_img;
        cvtColor(img, gray_img, COLOR_RGB2GRAY);
        Mat log_img;
        log_(gray_img, log_img);
        // Scene "key": where the mean log luminance sits within the range.
        float log_mean = static_cast<float>(sum(log_img)[0] / log_img.total());
        double log_min, log_max;
        minMaxLoc(log_img, &log_min, &log_max);
        log_img.release();
        ALOGD("HDR 3 ");
        double key = static_cast<float>((log_max - log_mean) / (log_max - log_min));
        float map_key = 0.3f + 0.7f * pow(static_cast<float>(key), 1.4f);
        intensity = exp(-intensity);
        Scalar chan_mean = mean(img);
        float gray_mean = static_cast<float>(mean(gray_img)[0]);
        std::vector<Mat> channels(3);
        split(img, channels);
        ALOGD("HDR 4 ");
        // Blend per-channel and global/gray adaptation, parallel over RGB.
#pragma omp parallel for num_threads(3)
        for (int i = 0; i < 3; i++) {
            float global = color_adapt * static_cast<float>(chan_mean[i]) + (1.0f - color_adapt) * gray_mean;
            Mat adapt = color_adapt * channels[i] + (1.0f - color_adapt) * gray_img;
            adapt = light_adapt * adapt + (1.0f - light_adapt) * global;
            pow(intensity * adapt, map_key, adapt);
            // (Translated note: "crashes here" - watch this line when
            // changing the parallelization.)
            channels[i] = channels[i].mul(1.0f/(adapt + channels[i]));
        }
        gray_img.release();
        merge(channels, img);
        ALOGD("HDR 5 ");
        // Final pass applies the user-requested gamma.
        linear->setGamma(gamma);
        linear->process(img, img);
        ALOGD("HDR 6 ");
    }
    float getGamma() const CV_OVERRIDE { return gamma; }
    void setGamma(float val) CV_OVERRIDE { gamma = val; }
    float getIntensity() const CV_OVERRIDE { return intensity; }
    void setIntensity(float val) CV_OVERRIDE { intensity = val; }
    float getLightAdaptation() const CV_OVERRIDE { return light_adapt; }
    void setLightAdaptation(float val) CV_OVERRIDE { light_adapt = val; }
    float getColorAdaptation() const CV_OVERRIDE { return color_adapt; }
    void setColorAdaptation(float val) CV_OVERRIDE { color_adapt = val; }
    // Serialization stubs (disabled via #if 0).
    void write(FileStorage& fs) const
    {
#if 0
        writeFormat(fs);
        fs << "name" << name
           << "gamma" << gamma
           << "intensity" << intensity
           << "light_adapt" << light_adapt
           << "color_adapt" << color_adapt;
#endif
    }
    void read(const FileNode& fn)
    {
#if 0
        FileNode n = fn["name"];
        CV_Assert(n.isString() && String(n) == name);
        gamma = fn["gamma"];
        intensity = fn["intensity"];
        light_adapt = fn["light_adapt"];
        color_adapt = fn["color_adapt"];
#endif
    }
protected:
    String name;
    float gamma, intensity, light_adapt, color_adapt;
};
// Factory for the local Reinhard tone mapper.
// NOTE(review): the parameter names (contrast/sigma_color/sigma_space) do not
// match what the ctor receives (intensity/light_adapt/color_adapt); they are
// forwarded positionally - confirm the intended mapping.
Ptr<TonemapReinhardNew> createTonemapReinhardNew(float gamma, float contrast, float sigma_color, float sigma_space)
{
    return makePtr<TonemapReinhardImpl>(gamma, contrast, sigma_color, sigma_space);
}
};
// Bitmap-compression sink that appends to a raw file descriptor smuggled
// through `userContext`. Returns true only when all `size` bytes were written.
// Fixes the original single-shot write(): POSIX write() may be interrupted
// (EINTR) or perform a short write, which previously reported failure or
// silently truncated output.
bool AndroidBitmap_CompressWriteFile(void *userContext, const void *data, size_t size)
{
    int fd = (int)((size_t)userContext);
    const char* p = (const char*)data;
    size_t remaining = size;
    while (remaining > 0) {
        ssize_t written = write(fd, p, remaining);
        if (written < 0) {
            if (errno == EINTR) {
                continue; // interrupted by a signal - retry
            }
            return false;
        }
        p += written;
        remaining -= (size_t)written;
    }
    return true;
}
// Bitmap-compression sink that appends `size` bytes from `data` to the
// std::vector<uint8_t> passed through `userContext`. Always succeeds.
bool AndroidBitmap_CompressWriteBuffer(void *userContext, const void *data, size_t size)
{
    auto* sink = static_cast<std::vector<uint8_t>*>(userContext);
    const auto* begin = static_cast<const uint8_t*>(data);
    sink->insert(sink->end(), begin, begin + size);
    return true;
}
// Converts a Java String to a UTF-8 std::string via String.getBytes("UTF-8").
// Returns "" for a null jstring. Local references created here are released
// before returning so the caller's local-ref table is not polluted.
inline std::string jstring2string(JNIEnv *env, jstring jStr)
{
    if (!jStr)
        return "";
    const jclass stringClass = env->GetObjectClass(jStr);
    const jmethodID getBytes = env->GetMethodID(stringClass, "getBytes", "(Ljava/lang/String;)[B");
    const jbyteArray stringJbytes = (jbyteArray) env->CallObjectMethod(jStr, getBytes, env->NewStringUTF("UTF-8"));
    size_t length = (size_t) env->GetArrayLength(stringJbytes);
    jbyte* pBytes = env->GetByteArrayElements(stringJbytes, NULL);
    std::string ret = std::string((char *)pBytes, length);
    // JNI_ABORT: the bytes were only read, no need to copy them back.
    env->ReleaseByteArrayElements(stringJbytes, pBytes, JNI_ABORT);
    env->DeleteLocalRef(stringJbytes);
    env->DeleteLocalRef(stringClass);
    return ret;
}
// Loads the LDR frames at `paths`, estimates the camera response (Debevec),
// merges them into a linear HDR image, tone maps with Reinhard, and returns
// the 8-bit result in `rgb`. `times` holds exposure times in seconds.
// NOTE(review): hdr.h declares makeHdr taking std::vector<cv::Mat>& and the
// callers in this file pass Mats, so another overload must exist elsewhere -
// confirm both overloads are kept in sync.
bool makeHdr(std::vector<float>& times, std::vector<std::string>& paths, cv::Mat& rgb)
{
    // Read images and exposure times
    std::vector<cv::Mat> images;
    images.resize(paths.size());
#pragma omp parallel for
    for (int idx = 0; idx < paths.size(); idx++)
    {
        images[idx] = cv::imread(paths[idx].c_str());
    }
    // Align input images (alignment currently disabled via #if 0)
    // cout << "Aligning images ... " << endl;
    cv::Ptr<cv::AlignMTB> alignMTB = cv::createAlignMTB();
#if 0
    alignMTB->process(images, images);
#endif
    // Obtain Camera Response Function (CRF)
    // cout << "Calculating Camera Response Function (CRF) ... " << endl;
    cv::Mat responseDebevec;
    cv::Ptr<cv::CalibrateDebevec> calibrateDebevec = cv::createCalibrateDebevec();
    calibrateDebevec->process(images, responseDebevec, times);
    // Merge images into an HDR linear image
    // cout << "Merging images into one HDR image ... ";
    cv::Mat hdrDebevec;
    cv::Ptr<cv::MergeDebevec> mergeDebevec = cv::createMergeDebevec();
    mergeDebevec->process(images, hdrDebevec, times, responseDebevec);
    // Save HDR image.
    // imwrite((OUTPUT_DIR "hdrDebevec.hdr"), hdrDebevec);
    // cout << "saved hdrDebevec.hdr " << endl;
    {
        // Free the source frames before tone mapping to cap peak memory.
        std::vector<cv::Mat> empty;
        empty.swap(images);
    }
    // Tonemap using Reinhard's method to obtain 24-bit color image
    // cout << "Tonemaping using Reinhard's method ... ";
    cv::Mat ldrReinhard;
    cv::Ptr<cv::TonemapReinhard> tonemapReinhard = cv::createTonemapReinhard(1.5, 0, 0, 0);
    tonemapReinhard->process(hdrDebevec, ldrReinhard);
    hdrDebevec.release();
    int type = ldrReinhard.type(); // (unused; kept for debugging)
    // Scale the [0,1] float output to [0,255] and quantize to 8 bits.
    ldrReinhard = ldrReinhard * 255;
    ldrReinhard.convertTo(rgb, CV_8U);
    ldrReinhard.release();
    return true;
}
// JNI entry: merges two Android Bitmaps (with exposure times in nanoseconds)
// into a tone-mapped HDR image saved as a JPEG at `outputPath`.
// Returns JNI_TRUE on success, JNI_FALSE if the JPEG could not be written.
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_makeHdr3(JNIEnv *env, jclass clazz,
                                                       jlong exposureTime1, jobject img1, jint length1,
                                                       jlong exposureTime2, jobject img2, jint length2,
                                                       jstring outputPath) {
    ALOGI("Start HDR3");
    std::vector<cv::Mat> images;
    images.resize(2);
    std::vector<jobject> bitmaps;
    bitmaps.push_back(img1);
    bitmaps.push_back(img2);
    ALOGI("Start Decode");
    // omp_set_num_threads(2);
    // #pragma omp parallel for num_threads(2)
    for (int idx = 0; idx < 2; idx++)
    {
        AndroidBitmapInfo bmpInfo = { 0 };
        int result = AndroidBitmap_getInfo(env, bitmaps[idx], &bmpInfo);
        if ((ANDROID_BITMAP_FLAGS_IS_HARDWARE & bmpInfo.flags) == ANDROID_BITMAP_FLAGS_IS_HARDWARE)
        {
            // Hardware bitmaps would require AHardwareBuffer access (disabled
            // below); such bitmaps currently leave images[idx] empty.
#if 0
            AHardwareBuffer* hardwareBuffer = NULL;
            result = AndroidBitmap_getHardwareBuffer(env, bitmaps[idx], &hardwareBuffer);
            void* outVirtualAddress = NULL;
            int32_t fence = -1;
            result = AHardwareBuffer_lock(hardwareBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY, fence, NULL, &outVirtualAddress);
            cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outVirtualAddress);
            tmp.copyTo(images[idx]);
            AHardwareBuffer_unlock(hardwareBuffer, &fence);
            AHardwareBuffer_release(hardwareBuffer);
#endif
        }
        else
        {
            // Copy the locked pixel buffer into an owned Mat before unlocking.
            void* outAddress = NULL;
            result = AndroidBitmap_lockPixels(env, bitmaps[idx], &outAddress);
            cv::Mat tmp(bmpInfo.height, bmpInfo.width, CV_8UC4, outAddress);
            tmp.copyTo(images[idx]);
            AndroidBitmap_unlockPixels(env, bitmaps[idx]);
            tmp.release();
        }
        //convert RGB to BGR
        // NOTE(review): the Mat is CV_8UC4 (RGBA) but COLOR_RGB2BGR is used -
        // confirm whether COLOR_RGBA2BGR was intended here.
        cv::cvtColor(images[idx], images[idx], cv::COLOR_RGB2BGR);
    }
    bitmaps.clear();
    // NOTE(review): deleting local refs of JNI arguments is legal but means
    // img1/img2 must not be touched after this point.
    env->DeleteLocalRef(img1);
    env->DeleteLocalRef(img2);
    ALOGI("End Decode");
    cv::Mat rgb;
    std::vector<float> times; // exposure times converted from ns to seconds
    times.push_back((double)(exposureTime1) / 1000000000.0);
    times.push_back((double)(exposureTime2) / 1000000000.0);
    ALOGI("Start MakeHDR3");
    makeHdr(times, images, rgb);
    ALOGI("End MakeHDR3");
    std::string fileName = jstring2string(env, outputPath);
    std::vector<int> params;
    params.push_back(cv::IMWRITE_JPEG_QUALITY);
    params.push_back(100);
    if (cv::imwrite(fileName.c_str(), rgb, params))
    {
        rgb.release();
        // images[0].release();
        // images[1].release();
        // images.clear();
        ALOGI("End HDR3");
        return JNI_TRUE;
    }
    // env->DeleteGlobalRef(img1);
    // env->DeleteGlobalRef(img2);
    return JNI_FALSE;
}
// JNI entry: decode a DNG from `byte_buffer` to `output_path`.
// Not implemented yet; always reports failure.
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_decodeDng(JNIEnv *env, jclass clazz,
                                                        jobject byte_buffer, jstring output_path) {
    // TODO: implement decodeDng()
    // Explicit return added: flowing off the end of a non-void function is
    // undefined behavior in C++, and the Java caller would read garbage.
    (void)env;
    (void)clazz;
    (void)byte_buffer;
    (void)output_path;
    return JNI_FALSE;
}
// JNI entry: merges two BMP files on disk into an HDR output image using the
// Vulkan compute pipeline (SPIR-V shader at `compFile`). Exposure times are
// given in nanoseconds and converted to seconds for the shader.
// Returns JNI_TRUE on success, JNI_FALSE on any failure (including Vulkan
// setup errors, which are caught and logged).
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_xypower_mppreview_Camera2RawFragment_makeHdr5(JNIEnv *env, jclass clazz,
                                                       jstring compFile,
                                                       jlong exposureTime1, jstring img1,
                                                       jlong exposureTime2, jstring img2,
                                                       jstring outputPath) {
    std::string compFilePath = jstring2string(env, compFile);
    std::string outputFile = jstring2string(env, outputPath);
    std::vector<std::string> inputFiles;
    std::vector<float> exposureValues; // seconds
    inputFiles.push_back(jstring2string(env, img1));
    inputFiles.push_back(jstring2string(env, img2));
    exposureValues.push_back((double)(exposureTime1) / 1000000000.0);
    exposureValues.push_back((double)(exposureTime2) / 1000000000.0);
    if (inputFiles.empty()) {
        // std::cerr << "No input BMP files specified" << std::endl;
        return JNI_FALSE;
    }
    // std::cout << "Processing " << inputFiles.size() << " images..." << std::endl;
    // Loop kept from the desktop version; its body is logging only (disabled).
    for (size_t i = 0; i < inputFiles.size(); i++) {
        // std::cout << "  " << inputFiles[i] << " (exposure: " << exposureValues[i] << ")" << std::endl;
    }
    try {
        // Compile shader if needed (in a production app, you'd precompile these)
#ifdef _WIN32
        // system("glslc.exe shaders/hdr_merge.comp -o shaders/hdr_merge.comp.spv");
#else
        // system("glslc shaders/hdr_merge.comp -o shaders/hdr_merge.comp.spv");
#endif
        // Create HDR generator and process images
        VulkanHdrGenerator generator(compFilePath);
        // Process with 256x256 tiles to keep memory usage low
        bool success = generator.generateHdr(inputFiles, outputFile, exposureValues, 256, 256);
        if (success) {
            // std::cout << "HDR image successfully created: " << outputFile << std::endl;
            return JNI_TRUE;
        } else {
            // std::cerr << "Failed to generate HDR image" << std::endl;
            return JNI_FALSE;
        }
    } catch (const std::exception& e)
    {
        // Vulkan setup failures throw std::runtime_error; report and fail.
        ALOGE("Error: %s", e.what());
        return JNI_FALSE;
    }
    return JNI_FALSE;
}

@ -0,0 +1,68 @@
#include <jni.h>
#include <string>
#include <vector>
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
#include <android/imagedecoder.h>
#include <android/log.h>
#include <media/NdkImage.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "hdr.h"
// Command-line HDR merge tool.
// argv: [1] output image path, [2] temp file path (currently unused),
//       [3] exposure #1 in nanoseconds, [4] image #1 path,
//       [5] exposure #2 in nanoseconds, [6] image #2 path.
// Exit codes: 0 on success, -1 on bad argument count, 1 on write failure.
int main( int argc, char** argv )
{
    if (argc != 7)
    {
        return -1;
    }
    std::string outputPath = argv[1];
    std::string tmpFilePath = argv[2]; // unused; kept for CLI compatibility
    // Exposure times arrive in nanoseconds and can exceed INT_MAX (~2.1 s),
    // so parse with atoll instead of atoi to avoid overflow.
    std::vector<float> times;
    times.push_back((double)(atoll(argv[3])) / 1000000000.0);
    times.push_back((double)(atoll(argv[5])) / 1000000000.0);
    std::vector<std::string> paths;
    paths.push_back(std::string(argv[4]));
    paths.push_back(std::string(argv[6]));
    std::vector<cv::Mat> images;
    images.resize(2);
    printf("Start Decode");
    // Decode both inputs in parallel.
#pragma omp parallel for num_threads(2)
    for (int idx = 0; idx < 2; idx++)
    {
        images[idx] = cv::imread(paths[idx].c_str());
    }
    printf("End Decode");
    cv::Mat rgb;
    printf("Start MakeHDR3");
    makeHdr(times, images, rgb);
    printf("End MakeHDR3");
    std::vector<int> params;
    params.push_back(cv::IMWRITE_JPEG_QUALITY);
    params.push_back(100);
    if (cv::imwrite(outputPath.c_str(), rgb, params))
    {
        printf("End HDR3");
        // Was `return JNI_TRUE` (== 1), which shells interpret as failure;
        // a successful run must exit 0.
        return 0;
    }
    return 1; // imwrite failed
}

@ -0,0 +1,25 @@
#include <jni.h>
#include <string>
#include <vector>
#include <fcntl.h>
#include <unistd.h>
#include <omp.h>
#include <android/imagedecoder.h>
#include <android/log.h>
#include <media/NdkImage.h>
#include <opencv2/opencv.hpp>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#define HDR_TAG "HDR"
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, HDR_TAG,__VA_ARGS__)
#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, HDR_TAG,__VA_ARGS__)
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, HDR_TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, HDR_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, HDR_TAG,__VA_ARGS__)
bool makeHdr(std::vector<float>& times, std::vector<cv::Mat>& images, cv::Mat& rgb);

@ -0,0 +1,72 @@
#version 450
layout(local_size_x = 16, local_size_y = 16, local_size_z = 1) in;
// Input images packed into one buffer
layout(set = 0, binding = 0) buffer InputBuffer {
float data[];
} inputImages;
// Output HDR image
layout(set = 0, binding = 1) buffer OutputBuffer {
vec4 pixels[]; // Make sure this matches your C++ expectations
} outputImage;
// Check if parameter layout matches C++ struct
layout(set = 0, binding = 2) uniform Params {
uint imageCount;
uint width;
uint height;
float exposureValues[16]; // Must match C++ struct size
} params;
// Merges `imageCount` exposures into one HDR pixel using a luminance-based
// well-exposedness weight, then writes the normalized result.
void main() {
    uint x = gl_GlobalInvocationID.x;
    uint y = gl_GlobalInvocationID.y;
    // Guard against the dispatch grid over-covering the image.
    if (x >= params.width || y >= params.height)
        return;
    uint pixelIndex = y * params.width + x;
    uint pixelsPerImage = params.width * params.height;
    // (Removed unused r0/g0/b0 debug reads of the first image.)
    vec3 hdrPixel = vec3(0.0);
    float weightSum = 0.0;
    for (uint i = 0; i < params.imageCount; i++) {
        // Images are packed back-to-back in the input buffer, 3 floats/pixel.
        uint baseOffset = i * pixelsPerImage * 3;
        float r = inputImages.data[baseOffset + pixelIndex * 3 + 0];
        float g = inputImages.data[baseOffset + pixelIndex * 3 + 1];
        float b = inputImages.data[baseOffset + pixelIndex * 3 + 2];
        vec3 rgb = vec3(r, g, b);
        // Rec.709 luminance rates how well-exposed this sample is.
        float lum = dot(rgb, vec3(0.2126, 0.7152, 0.0722));
        // Pixels near mid-gray get the highest weight; floor the weight so no
        // sample contributes exactly zero.
        float weight = 1.0 - pow(abs(lum - 0.5) * 2.0, 2.0);
        weight = max(weight, 0.001);
        // NOTE(review): samples are multiplied by their exposure value here;
        // classic radiance recovery divides by exposure instead - confirm
        // this scaling is intentional.
        float exposureFactor = params.exposureValues[i];
        hdrPixel += rgb * weight * exposureFactor;
        weightSum += weight;
    }
    // Apply stronger exposure boost directly in the shader
    // hdrPixel *= 2.5; // Boost by 2.5x
    // Normalize by the accumulated weight (floored to avoid divide-by-zero).
    hdrPixel = hdrPixel / max(weightSum, 0.001);
    // Store the result
    outputImage.pixels[pixelIndex] = vec4(hdrPixel, 1.0);
}

File diff suppressed because it is too large Load Diff

@ -0,0 +1,734 @@
//
// Created by Matthew on 2025/3/22.
//
#include "vulkan_hdr_generator.h"
#include "vulkan_hdr_generator.h"
#include "HdrWriter.h"
#include <stdexcept>
#include <fstream>
#include <iostream>
#include <algorithm>
#include <cstring>
#include <jni.h>
#include <android/log.h>
#define MP_TAG "VOLKAN"
#define ALOGV(...) __android_log_print(ANDROID_LOG_VERBOSE, MP_TAG,__VA_ARGS__)
#define ALOGI(...) __android_log_print(ANDROID_LOG_INFO, MP_TAG,__VA_ARGS__)
#define ALOGD(...) __android_log_print(ANDROID_LOG_DEBUG, MP_TAG, __VA_ARGS__)
#define ALOGW(...) __android_log_print(ANDROID_LOG_WARN, MP_TAG, __VA_ARGS__)
#define ALOGE(...) __android_log_print(ANDROID_LOG_ERROR, MP_TAG,__VA_ARGS__)
// Builds a compute-only Vulkan context and pipeline from the SPIR-V compute
// shader at `compFile`. Throws std::runtime_error if any setup step fails.
VulkanHdrGenerator::VulkanHdrGenerator(const std::string& compFile) : compFilePath(compFile) {
    // Pre-null every Vulkan handle so cleanup() can run safely even when
    // setupVulkan()/createComputeResources() throws part-way through.
    instance = VK_NULL_HANDLE;
    physicalDevice = VK_NULL_HANDLE;
    device = VK_NULL_HANDLE;
    computeQueue = VK_NULL_HANDLE;
    commandPool = VK_NULL_HANDLE;
    descriptorPool = VK_NULL_HANDLE;
    commandBuffer = VK_NULL_HANDLE;
    inputBuffer = VK_NULL_HANDLE;
    inputBufferMemory = VK_NULL_HANDLE;
    outputBuffer = VK_NULL_HANDLE;
    outputBufferMemory = VK_NULL_HANDLE;
    paramsBuffer = VK_NULL_HANDLE;
    paramsBufferMemory = VK_NULL_HANDLE;
    computeShaderModule = VK_NULL_HANDLE;
    computePipeline = VK_NULL_HANDLE;
    pipelineLayout = VK_NULL_HANDLE;
    descriptorSetLayout = VK_NULL_HANDLE;
    descriptorSet = VK_NULL_HANDLE;
    setupVulkan();
    createComputeResources();
}
// Releases all Vulkan resources owned by this generator.
VulkanHdrGenerator::~VulkanHdrGenerator() {
    cleanup();
}
// Creates the core Vulkan objects needed for compute work: instance,
// physical/logical device with a compute queue, command pool, descriptor
// pool, and one primary command buffer. Throws std::runtime_error on any
// failed Vulkan call.
void VulkanHdrGenerator::setupVulkan() {
    // Create Vulkan instance
    VkApplicationInfo appInfo{};
    appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    appInfo.pApplicationName = "HDR Generator";
    appInfo.applicationVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.pEngineName = "No Engine";
    appInfo.engineVersion = VK_MAKE_VERSION(1, 0, 0);
    appInfo.apiVersion = VK_API_VERSION_1_0;
    VkInstanceCreateInfo createInfo{};
    createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    createInfo.pApplicationInfo = &appInfo;
    if (vkCreateInstance(&createInfo, nullptr, &instance) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create Vulkan instance");
    }
    // Select physical device
    uint32_t deviceCount = 0;
    vkEnumeratePhysicalDevices(instance, &deviceCount, nullptr);
    if (deviceCount == 0) {
        throw std::runtime_error("Failed to find GPUs with Vulkan support");
    }
    std::vector<VkPhysicalDevice> devices(deviceCount);
    vkEnumeratePhysicalDevices(instance, &deviceCount, devices.data());
    // Just use the first device (no suitability ranking is performed).
    physicalDevice = devices[0];
    // Find compute queue family index
    computeQueueFamilyIndex = findComputeQueueFamily(physicalDevice);
    // Create logical device with a single compute queue.
    VkDeviceQueueCreateInfo queueCreateInfo{};
    queueCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
    queueCreateInfo.queueFamilyIndex = computeQueueFamilyIndex;
    queueCreateInfo.queueCount = 1;
    float queuePriority = 1.0f;
    queueCreateInfo.pQueuePriorities = &queuePriority;
    VkPhysicalDeviceFeatures deviceFeatures{};
    VkDeviceCreateInfo deviceCreateInfo{};
    deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
    deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo;
    deviceCreateInfo.queueCreateInfoCount = 1;
    deviceCreateInfo.pEnabledFeatures = &deviceFeatures;
    if (vkCreateDevice(physicalDevice, &deviceCreateInfo, nullptr, &device) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create logical device");
    }
    vkGetDeviceQueue(device, computeQueueFamilyIndex, 0, &computeQueue);
    // Create command pool; RESET flag lets the command buffer be re-recorded.
    VkCommandPoolCreateInfo poolInfo{};
    poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    poolInfo.queueFamilyIndex = computeQueueFamilyIndex;
    poolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    if (vkCreateCommandPool(device, &poolInfo, nullptr, &commandPool) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create command pool");
    }
    // Create descriptor pool sized for one set: storage buffers + one uniform.
    VkDescriptorPoolSize poolSizes[2];
    poolSizes[0].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
    poolSizes[0].descriptorCount = 3; // Input, output, params
    poolSizes[1].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    poolSizes[1].descriptorCount = 1;
    VkDescriptorPoolCreateInfo descriptorPoolInfo{};
    descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    descriptorPoolInfo.poolSizeCount = 2;
    descriptorPoolInfo.pPoolSizes = poolSizes;
    descriptorPoolInfo.maxSets = 1;
    if (vkCreateDescriptorPool(device, &descriptorPoolInfo, nullptr, &descriptorPool) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create descriptor pool");
    }
    // Allocate the single primary command buffer used for dispatches.
    VkCommandBufferAllocateInfo allocInfo{};
    allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    allocInfo.commandPool = commandPool;
    allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    allocInfo.commandBufferCount = 1;
    if (vkAllocateCommandBuffers(device, &allocInfo, &commandBuffer) != VK_SUCCESS) {
        throw std::runtime_error("Failed to allocate command buffer");
    }
}
// Creates the descriptor set layout (bindings 0/1 storage buffers, binding 2
// uniform buffer - must match the compute shader), the pipeline layout, and
// the compute pipeline from the SPIR-V file at compFilePath.
// NOTE(review): the shader module is created, used and destroyed locally; the
// computeShaderModule member stays VK_NULL_HANDLE - confirm cleanup() does
// not expect it to hold the module.
void VulkanHdrGenerator::createComputeResources() {
    // Create descriptor set layout
    VkDescriptorSetLayoutBinding bindings[3];
    bindings[0].binding = 0;
    bindings[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
    bindings[0].descriptorCount = 1;
    bindings[0].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
    bindings[0].pImmutableSamplers = nullptr;
    bindings[1].binding = 1;
    bindings[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
    bindings[1].descriptorCount = 1;
    bindings[1].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
    bindings[1].pImmutableSamplers = nullptr;
    bindings[2].binding = 2;
    bindings[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
    bindings[2].descriptorCount = 1;
    bindings[2].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
    bindings[2].pImmutableSamplers = nullptr;
    VkDescriptorSetLayoutCreateInfo layoutInfo{};
    layoutInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    layoutInfo.bindingCount = 3;
    layoutInfo.pBindings = bindings;
    if (vkCreateDescriptorSetLayout(device, &layoutInfo, nullptr, &descriptorSetLayout) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create descriptor set layout");
    }
    // Create pipeline layout (single set, no push constants).
    VkPipelineLayoutCreateInfo pipelineLayoutInfo{};
    pipelineLayoutInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pipelineLayoutInfo.setLayoutCount = 1;
    pipelineLayoutInfo.pSetLayouts = &descriptorSetLayout;
    if (vkCreatePipelineLayout(device, &pipelineLayoutInfo, nullptr, &pipelineLayout) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create pipeline layout");
    }
    // Load the SPIR-V shader bytes from disk and wrap them in a module.
    std::vector<uint8_t> shaderCode = readFile(compFilePath);
    VkShaderModule shaderModule = createShaderModule(shaderCode);
    // Create compute pipeline with entry point "main".
    VkPipelineShaderStageCreateInfo shaderStageInfo{};
    shaderStageInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStageInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT;
    shaderStageInfo.module = shaderModule;
    shaderStageInfo.pName = "main";
    VkComputePipelineCreateInfo pipelineInfo{};
    pipelineInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
    pipelineInfo.stage = shaderStageInfo;
    pipelineInfo.layout = pipelineLayout;
    if (vkCreateComputePipelines(device, VK_NULL_HANDLE, 1, &pipelineInfo, nullptr, &computePipeline) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create compute pipeline");
    }
    // The module is only needed for pipeline creation; destroy it now.
    vkDestroyShaderModule(device, shaderModule, nullptr);
}
// Allocates the three device buffers the compute shader binds: an input
// storage buffer (`inputSize` bytes), an output storage buffer
// (`outputSize` bytes), and a small uniform buffer for HdrMergeParams.
// All are backed by HOST_VISIBLE | HOST_COHERENT memory so the CPU can map
// and copy data directly without staging buffers.
void VulkanHdrGenerator::createBuffers(VkDeviceSize inputSize, VkDeviceSize outputSize) {
    // Create input buffer
    VkBufferCreateInfo inputBufferInfo{};
    inputBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    inputBufferInfo.size = inputSize;
    inputBufferInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
    inputBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    if (vkCreateBuffer(device, &inputBufferInfo, nullptr, &inputBuffer) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create input buffer");
    }
    VkMemoryRequirements inputMemRequirements;
    vkGetBufferMemoryRequirements(device, inputBuffer, &inputMemRequirements);
    VkMemoryAllocateInfo inputAllocInfo{};
    inputAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    inputAllocInfo.allocationSize = inputMemRequirements.size;
    inputAllocInfo.memoryTypeIndex = findMemoryType(inputMemRequirements.memoryTypeBits,
                                                    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
    if (vkAllocateMemory(device, &inputAllocInfo, nullptr, &inputBufferMemory) != VK_SUCCESS) {
        throw std::runtime_error("Failed to allocate input buffer memory");
    }
    vkBindBufferMemory(device, inputBuffer, inputBufferMemory, 0);
    // Create output buffer
    VkBufferCreateInfo outputBufferInfo{};
    outputBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    outputBufferInfo.size = outputSize;
    outputBufferInfo.usage = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
    outputBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    if (vkCreateBuffer(device, &outputBufferInfo, nullptr, &outputBuffer) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create output buffer");
    }
    VkMemoryRequirements outputMemRequirements;
    vkGetBufferMemoryRequirements(device, outputBuffer, &outputMemRequirements);
    VkMemoryAllocateInfo outputAllocInfo{};
    outputAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    outputAllocInfo.allocationSize = outputMemRequirements.size;
    outputAllocInfo.memoryTypeIndex = findMemoryType(outputMemRequirements.memoryTypeBits,
                                                     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
    if (vkAllocateMemory(device, &outputAllocInfo, nullptr, &outputBufferMemory) != VK_SUCCESS) {
        throw std::runtime_error("Failed to allocate output buffer memory");
    }
    vkBindBufferMemory(device, outputBuffer, outputBufferMemory, 0);
    // Create params (uniform) buffer sized to the shared C++/GLSL struct.
    VkBufferCreateInfo paramsBufferInfo{};
    paramsBufferInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    paramsBufferInfo.size = sizeof(HdrMergeParams);
    paramsBufferInfo.usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    paramsBufferInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    if (vkCreateBuffer(device, &paramsBufferInfo, nullptr, &paramsBuffer) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create params buffer");
    }
    VkMemoryRequirements paramsMemRequirements;
    vkGetBufferMemoryRequirements(device, paramsBuffer, &paramsMemRequirements);
    VkMemoryAllocateInfo paramsAllocInfo{};
    paramsAllocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    paramsAllocInfo.allocationSize = paramsMemRequirements.size;
    paramsAllocInfo.memoryTypeIndex = findMemoryType(paramsMemRequirements.memoryTypeBits,
                                                     VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
    if (vkAllocateMemory(device, &paramsAllocInfo, nullptr, &paramsBufferMemory) != VK_SUCCESS) {
        throw std::runtime_error("Failed to allocate params buffer memory");
    }
    vkBindBufferMemory(device, paramsBuffer, paramsBufferMemory, 0);
}
void VulkanHdrGenerator::createDescriptorSet() {
// Allocate descriptor set
VkDescriptorSetAllocateInfo allocInfo{};
allocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
allocInfo.descriptorPool = descriptorPool;
allocInfo.descriptorSetCount = 1;
allocInfo.pSetLayouts = &descriptorSetLayout;
if (vkAllocateDescriptorSets(device, &allocInfo, &descriptorSet) != VK_SUCCESS) {
throw std::runtime_error("Failed to allocate descriptor set");
}
// Update descriptor set
VkDescriptorBufferInfo inputBufferInfo{};
inputBufferInfo.buffer = inputBuffer;
inputBufferInfo.offset = 0;
inputBufferInfo.range = VK_WHOLE_SIZE;
VkDescriptorBufferInfo outputBufferInfo{};
outputBufferInfo.buffer = outputBuffer;
outputBufferInfo.offset = 0;
outputBufferInfo.range = VK_WHOLE_SIZE;
VkDescriptorBufferInfo paramsBufferInfo{};
paramsBufferInfo.buffer = paramsBuffer;
paramsBufferInfo.offset = 0;
paramsBufferInfo.range = VK_WHOLE_SIZE;
VkWriteDescriptorSet descriptorWrites[3];
descriptorWrites[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
descriptorWrites[0].dstSet = descriptorSet;
descriptorWrites[0].dstBinding = 0;
descriptorWrites[0].dstArrayElement = 0;
descriptorWrites[0].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
descriptorWrites[0].descriptorCount = 1;
descriptorWrites[0].pBufferInfo = &inputBufferInfo;
descriptorWrites[0].pImageInfo = nullptr;
descriptorWrites[0].pTexelBufferView = nullptr;
descriptorWrites[0].pNext = nullptr;
descriptorWrites[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
descriptorWrites[1].dstSet = descriptorSet;
descriptorWrites[1].dstBinding = 1;
descriptorWrites[1].dstArrayElement = 0;
descriptorWrites[1].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
descriptorWrites[1].descriptorCount = 1;
descriptorWrites[1].pBufferInfo = &outputBufferInfo;
descriptorWrites[1].pImageInfo = nullptr;
descriptorWrites[1].pTexelBufferView = nullptr;
descriptorWrites[1].pNext = nullptr;
descriptorWrites[2].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
descriptorWrites[2].dstSet = descriptorSet;
descriptorWrites[2].dstBinding = 2;
descriptorWrites[2].dstArrayElement = 0;
descriptorWrites[2].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
descriptorWrites[2].descriptorCount = 1;
descriptorWrites[2].pBufferInfo = &paramsBufferInfo;
descriptorWrites[2].pImageInfo = nullptr;
descriptorWrites[2].pTexelBufferView = nullptr;
descriptorWrites[2].pNext = nullptr;
vkUpdateDescriptorSets(device, 3, descriptorWrites, 0, nullptr);
}
void VulkanHdrGenerator::processImageBatch(
        const std::vector<std::vector<float>>& images,
        std::vector<float>& outputData,
        int32_t width, int32_t height,
        const std::vector<float>& exposures) {
    // Merges one tile: uploads `images` (interleaved RGB floats, each
    // width*height pixels), dispatches the HDR-merge compute shader, and
    // returns the merged RGB pixels in `outputData` (alpha is dropped).
    // On any failure, `outputData` is left untouched.
    if (images.empty() || width <= 0 || height <= 0 || exposures.empty()) {
        ALOGE("Invalid input parameters to processImageBatch");
        return;
    }
    size_t imageCount = images.size();
    // size_t arithmetic: width * height as int32_t could overflow for large tiles.
    size_t pixelCount = static_cast<size_t>(width) * static_cast<size_t>(height);
    size_t inputBufferSize = imageCount * pixelCount * 3 * sizeof(float); // RGB input
    size_t outputBufferSize = pixelCount * 4 * sizeof(float);             // RGBA output
    // Lazily (re)create GPU buffers whenever the required sizes change.
    // NOTE(review): function-local statics are shared by ALL generator
    // instances and outlive them; with more than one VulkanHdrGenerator the
    // cached sizes go stale. These should become member variables -- confirm
    // the app only ever creates a single instance.
    static VkDeviceSize lastInputSize = 0;
    static VkDeviceSize lastOutputSize = 0;
    if (lastInputSize != inputBufferSize || lastOutputSize != outputBufferSize) {
        if (lastInputSize > 0) {
            // Free the descriptor set first: it references the buffers below.
            if (descriptorSet != VK_NULL_HANDLE) {
                vkFreeDescriptorSets(device, descriptorPool, 1, &descriptorSet);
                descriptorSet = VK_NULL_HANDLE;
            }
            vkDestroyBuffer(device, inputBuffer, nullptr);
            vkFreeMemory(device, inputBufferMemory, nullptr);
            vkDestroyBuffer(device, outputBuffer, nullptr);
            vkFreeMemory(device, outputBufferMemory, nullptr);
            vkDestroyBuffer(device, paramsBuffer, nullptr);
            vkFreeMemory(device, paramsBufferMemory, nullptr);
        }
        createBuffers(inputBufferSize, outputBufferSize);
        createDescriptorSet();
        lastInputSize = inputBufferSize;
        lastOutputSize = outputBufferSize;
    }
    // Upload the input images into the host-visible input buffer.
    void* inputData;
    vkMapMemory(device, inputBufferMemory, 0, inputBufferSize, 0, &inputData);
    float* floatPtr = static_cast<float*>(inputData);
    for (size_t i = 0; i < imageCount; i++) {
        std::memcpy(
                floatPtr + i * pixelCount * 3,
                images[i].data(),
                pixelCount * 3 * sizeof(float)
        );
    }
    vkUnmapMemory(device, inputBufferMemory);
    // Upload merge parameters. The shader-side exposure array holds at most
    // 16 entries, so the count handed to the shader must be clamped too --
    // previously only the copy loop was clamped, letting the shader index
    // past exposureValues[15] when more than 16 images were supplied.
    size_t usableImages = imageCount < 16 ? imageCount : 16;
    if (usableImages < imageCount) {
        ALOGE("processImageBatch: %zu images supplied but only 16 supported; extras ignored",
              imageCount);
    }
    HdrMergeParams params{};
    params.imageCount = static_cast<uint32_t>(usableImages);
    params.width = width;
    params.height = height;
    // Also bound by exposures.size(): the caller may pass fewer exposure
    // values than images, which previously read past the end of `exposures`.
    for (size_t i = 0; i < usableImages && i < exposures.size(); i++) {
        params.exposureValues[i] = exposures[i];
    }
    void* paramsData;
    vkMapMemory(device, paramsBufferMemory, 0, sizeof(HdrMergeParams), 0, &paramsData);
    std::memcpy(paramsData, &params, sizeof(HdrMergeParams));
    vkUnmapMemory(device, paramsBufferMemory);
    // Record and submit the compute dispatch; 16x16 matches the dispatch
    // rounding below (assumed shader local size -- confirm against the .comp).
    VkCommandBufferBeginInfo beginInfo{};
    beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    vkBeginCommandBuffer(commandBuffer, &beginInfo);
    vkCmdBindPipeline(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, computePipeline);
    vkCmdBindDescriptorSets(commandBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipelineLayout,
                            0, 1, &descriptorSet, 0, nullptr);
    uint32_t groupCountX = (width + 15) / 16;
    uint32_t groupCountY = (height + 15) / 16;
    vkCmdDispatch(commandBuffer, groupCountX, groupCountY, 1);
    vkEndCommandBuffer(commandBuffer);
    VkSubmitInfo submitInfo{};
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &commandBuffer;
    // Previously the submit result was ignored; on failure the code went on
    // to read an output buffer the GPU never wrote.
    if (vkQueueSubmit(computeQueue, 1, &submitInfo, VK_NULL_HANDLE) != VK_SUCCESS) {
        ALOGE("processImageBatch: vkQueueSubmit failed");
        return;
    }
    vkQueueWaitIdle(computeQueue);
    // Download results, dropping the alpha channel written by the shader.
    outputData.resize(pixelCount * 3);
    void* outputMappedMemory;
    vkMapMemory(device, outputBufferMemory, 0, outputBufferSize, 0, &outputMappedMemory);
    float* outputFloats = static_cast<float*>(outputMappedMemory);
    for (size_t i = 0; i < pixelCount; i++) {
        outputData[i * 3 + 0] = outputFloats[i * 4 + 0]; // R
        outputData[i * 3 + 1] = outputFloats[i * 4 + 1]; // G
        outputData[i * 3 + 2] = outputFloats[i * 4 + 2]; // B
    }
    vkUnmapMemory(device, outputBufferMemory);
}
// Add this function to VulkanHdrGenerator.cpp, just before the generateHdr method
// Multiplies every channel of an interleaved RGB float image by `boost`
// (plain linear gain, no tone mapping), logging the pre-boost average
// Rec.709 luminance for diagnostics.
void boostBrightness(std::vector<float>& imageData, float boost = 10.0f) {
    if (imageData.empty()) return;
    // Average luminance over complete RGB triplets only. The `i + 2` bound
    // also fixes an out-of-range read the original loop performed when
    // size() was not a multiple of 3.
    float totalLum = 0.0f;
    size_t pixels = imageData.size() / 3;
    for (size_t i = 0; i + 2 < imageData.size(); i += 3) {
        float r = imageData[i];
        float g = imageData[i + 1];
        float b = imageData[i + 2];
        // Rec.709 luma weights.
        totalLum += 0.2126f * r + 0.7152f * g + 0.0722f * b;
    }
    // Guard the division: with fewer than 3 floats `pixels` is 0 and the
    // original logged NaN.
    if (pixels > 0) {
        ALOGI("Original average luminance: %.4f", totalLum / pixels);
    }
    // Simple linear scaling without tone mapping.
    for (float& v : imageData) {
        v *= boost;
    }
}
bool VulkanHdrGenerator::generateHdr(
const std::vector<std::string>& bmpFiles,
const std::string& outputFile,
const std::vector<float>& exposureValues,
int32_t tileWidth,
int32_t tileHeight) {
if (bmpFiles.empty()) {
std::cerr << "No input BMP files specified" << std::endl;
return false;
}
// Check all images are the same dimensions
std::vector<BmpInfo> bmpInfos(bmpFiles.size());
for (size_t i = 0; i < bmpFiles.size(); i++) {
bmpInfos[i] = BmpLoader::readBmpInfo(bmpFiles[i]);
if (i > 0) {
if (bmpInfos[i].width != bmpInfos[0].width ||
bmpInfos[i].height != bmpInfos[0].height) {
std::cerr << "All BMP files must have the same dimensions" << std::endl;
return false;
}
}
}
int32_t imageWidth = bmpInfos[0].width;
int32_t imageHeight = bmpInfos[0].height;
// Adjust tile dimensions if necessary
tileWidth = std::min(tileWidth, imageWidth);
tileHeight = std::min(tileHeight, imageHeight);
// Output HDR data
std::vector<float> outputHdrData(imageWidth * imageHeight * 3);
// Process image in tiles
for (int32_t y = 0; y < imageHeight; y += tileHeight) {
#ifndef NDEBUG
ALOGI("Processing tile at Y=%d", y);
#endif
for (int32_t x = 0; x < imageWidth; x += tileWidth) {
int32_t currentTileWidth = std::min(tileWidth, imageWidth - x);
int32_t currentTileHeight = std::min(tileHeight, imageHeight - y);
// Load tile from each BMP
std::vector<std::vector<float>> tileData(bmpFiles.size());
for (size_t i = 0; i < bmpFiles.size(); i++) {
tileData[i] = BmpLoader::readBmpRegionAsFloat(
bmpFiles[i], bmpInfos[i], x, y, currentTileWidth, currentTileHeight);
}
// Process tile with Vulkan
std::vector<float> tileOutput;
processImageBatch(tileData, tileOutput, currentTileWidth, currentTileHeight, exposureValues);
// Copy tile data to output HDR
for (int32_t tileY = 0; tileY < currentTileHeight; tileY++) {
for (int32_t tileX = 0; tileX < currentTileWidth; tileX++) {
int32_t imagePixelIndex = ((y + tileY) * imageWidth + (x + tileX)) * 3;
int32_t tilePixelIndex = (tileY * currentTileWidth + tileX) * 3;
outputHdrData[imagePixelIndex + 0] = tileOutput[tilePixelIndex + 0];
outputHdrData[imagePixelIndex + 1] = tileOutput[tilePixelIndex + 1];
outputHdrData[imagePixelIndex + 2] = tileOutput[tilePixelIndex + 2];
}
}
}
}
// After all tiles are processed and before writing:
ALOGI("Applying brightness boost to final HDR image");
boostBrightness(outputHdrData, 30.0f); // Try a large value like 10.0
// Write output HDR
bool res = HdrWriter::writeRGB(outputFile, outputHdrData, (int)imageWidth, (int)imageHeight, HdrWriter::Format::BMP);
ALOGI("BMP saved");
return res;
}
uint32_t VulkanHdrGenerator::findComputeQueueFamily(VkPhysicalDevice device) {
    // Returns the index of the first queue family offering compute support.
    uint32_t familyCount = 0;
    vkGetPhysicalDeviceQueueFamilyProperties(device, &familyCount, nullptr);
    std::vector<VkQueueFamilyProperties> families(familyCount);
    vkGetPhysicalDeviceQueueFamilyProperties(device, &familyCount, families.data());
    for (uint32_t index = 0; index < familyCount; ++index) {
        const bool supportsCompute =
                (families[index].queueFlags & VK_QUEUE_COMPUTE_BIT) != 0;
        if (supportsCompute) {
            return index;
        }
    }
    throw std::runtime_error("Failed to find a compute queue family");
}
uint32_t VulkanHdrGenerator::findMemoryType(uint32_t typeFilter, VkMemoryPropertyFlags properties) {
VkPhysicalDeviceMemoryProperties memProperties;
vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProperties);
for (uint32_t i = 0; i < memProperties.memoryTypeCount; i++) {
if ((typeFilter & (1 << i)) &&
(memProperties.memoryTypes[i].propertyFlags & properties) == properties) {
return i;
}
}
throw std::runtime_error("Failed to find suitable memory type");
}
// Wraps a SPIR-V blob in a VkShaderModule.
// Throws std::runtime_error if the blob is invalid or creation fails.
VkShaderModule VulkanHdrGenerator::createShaderModule(const std::vector<uint8_t>& code) {
    // SPIR-V is a stream of 32-bit words: Vulkan requires codeSize to be a
    // non-zero multiple of 4. The original passed arbitrary sizes straight
    // through, which is invalid usage for e.g. a truncated shader file.
    if (code.empty() || code.size() % 4 != 0) {
        throw std::runtime_error("Invalid SPIR-V blob: size must be a non-zero multiple of 4");
    }
    VkShaderModuleCreateInfo createInfo{};
    createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    createInfo.codeSize = code.size();
    // NOTE(review): assumes the vector's storage is at least 4-byte aligned,
    // which holds for the default allocator on mainstream platforms.
    createInfo.pCode = reinterpret_cast<const uint32_t*>(code.data());
    VkShaderModule shaderModule;
    if (vkCreateShaderModule(device, &createInfo, nullptr, &shaderModule) != VK_SUCCESS) {
        throw std::runtime_error("Failed to create shader module");
    }
    return shaderModule;
}
// Reads an entire binary file into memory (used for the compute shader blob).
// Opening at the end lets tellg() report the file size directly.
// Throws std::runtime_error if the file cannot be opened or fully read.
std::vector<uint8_t> VulkanHdrGenerator::readFile(const std::string& filename) {
    std::ifstream file(filename, std::ios::ate | std::ios::binary);
    if (!file.is_open()) {
        throw std::runtime_error("Failed to open file: " + filename);
    }
    size_t fileSize = static_cast<size_t>(file.tellg());
    std::vector<uint8_t> buffer(fileSize);
    file.seekg(0);
    file.read(reinterpret_cast<char*>(buffer.data()), fileSize);
    // The original ignored read failures and could silently return a
    // partially filled buffer.
    if (!file) {
        throw std::runtime_error("Failed to read file: " + filename);
    }
    return buffer;
}
// Releases every Vulkan object owned by this generator. Each handle is
// checked against VK_NULL_HANDLE and nulled after destruction, so the method
// tolerates a partially completed setup and repeated invocation.
void VulkanHdrGenerator::cleanup() {
    if (device != VK_NULL_HANDLE) {
        // Make sure no submitted GPU work still references the objects below.
        vkDeviceWaitIdle(device);
        // Destroy buffers if they were created
        if (inputBuffer != VK_NULL_HANDLE) {
            vkDestroyBuffer(device, inputBuffer, nullptr);
            inputBuffer = VK_NULL_HANDLE;
        }
        if (inputBufferMemory != VK_NULL_HANDLE) {
            vkFreeMemory(device, inputBufferMemory, nullptr);
            inputBufferMemory = VK_NULL_HANDLE;
        }
        if (outputBuffer != VK_NULL_HANDLE) {
            vkDestroyBuffer(device, outputBuffer, nullptr);
            outputBuffer = VK_NULL_HANDLE;
        }
        if (outputBufferMemory != VK_NULL_HANDLE) {
            vkFreeMemory(device, outputBufferMemory, nullptr);
            outputBufferMemory = VK_NULL_HANDLE;
        }
        if (paramsBuffer != VK_NULL_HANDLE) {
            vkDestroyBuffer(device, paramsBuffer, nullptr);
            paramsBuffer = VK_NULL_HANDLE;
        }
        if (paramsBufferMemory != VK_NULL_HANDLE) {
            vkFreeMemory(device, paramsBufferMemory, nullptr);
            paramsBufferMemory = VK_NULL_HANDLE;
        }
        // Destroy other resources.
        // Destroying the descriptor pool implicitly frees descriptorSet, and
        // destroying the command pool frees commandBuffer.
        if (descriptorPool != VK_NULL_HANDLE) {
            vkDestroyDescriptorPool(device, descriptorPool, nullptr);
            descriptorPool = VK_NULL_HANDLE;
        }
        if (descriptorSetLayout != VK_NULL_HANDLE) {
            vkDestroyDescriptorSetLayout(device, descriptorSetLayout, nullptr);
            descriptorSetLayout = VK_NULL_HANDLE;
        }
        if (computePipeline != VK_NULL_HANDLE) {
            vkDestroyPipeline(device, computePipeline, nullptr);
            computePipeline = VK_NULL_HANDLE;
        }
        if (pipelineLayout != VK_NULL_HANDLE) {
            vkDestroyPipelineLayout(device, pipelineLayout, nullptr);
            pipelineLayout = VK_NULL_HANDLE;
        }
        if (commandPool != VK_NULL_HANDLE) {
            vkDestroyCommandPool(device, commandPool, nullptr);
            commandPool = VK_NULL_HANDLE;
        }
        // NOTE(review): computeShaderModule (declared in the header) is never
        // destroyed here -- presumably vkDestroyShaderModule is called right
        // after pipeline creation; confirm, otherwise it leaks.
        vkDestroyDevice(device, nullptr);
        device = VK_NULL_HANDLE;
    }
    if (instance != VK_NULL_HANDLE) {
        vkDestroyInstance(instance, nullptr);
        instance = VK_NULL_HANDLE;
    }
}

@ -0,0 +1,76 @@
//
// Created by Matthew on 2025/3/22.
//
#ifndef MPPREVIEW_VULKAN_HDR_GENERATOR_H
#define MPPREVIEW_VULKAN_HDR_GENERATOR_H
#include <vector>
#include <string>
#include <vulkan/vulkan.h>
#include "BmpLoader.h"
// CPU-side mirror of the compute shader's merge-parameter uniform block,
// copied verbatim into the params uniform buffer each dispatch.
// NOTE(review): if the shader declares this block with std140 layout, a
// float array has 16-byte element stride there and will NOT match this
// packed C layout -- verify against the shader's block declaration.
struct HdrMergeParams {
    uint32_t imageCount;      // number of images the shader should merge
    uint32_t width;           // tile width in pixels
    uint32_t height;          // tile height in pixels
    float exposureValues[16]; // Support up to 16 images
};
// GPU compute-shader based HDR merger: loads BMP exposures, merges them
// tile-by-tile on a Vulkan compute queue, and writes the result to disk.
class VulkanHdrGenerator {
private:
    // All handles start as VK_NULL_HANDLE so cleanup() can safely test them
    // even when setup fails part-way. Previously only inputBuffer was
    // initialized; the remaining handles were indeterminate, making the
    // null-checks in cleanup() undefined behaviour.
    VkInstance instance = VK_NULL_HANDLE;
    VkPhysicalDevice physicalDevice = VK_NULL_HANDLE;
    VkDevice device = VK_NULL_HANDLE;
    VkQueue computeQueue = VK_NULL_HANDLE;
    VkCommandPool commandPool = VK_NULL_HANDLE;
    VkDescriptorPool descriptorPool = VK_NULL_HANDLE;
    VkCommandBuffer commandBuffer = VK_NULL_HANDLE;
    VkBuffer stagingBuffer = VK_NULL_HANDLE;
    VkDeviceMemory stagingBufferMemory = VK_NULL_HANDLE;
    VkBuffer inputBuffer = VK_NULL_HANDLE;
    VkDeviceMemory inputBufferMemory = VK_NULL_HANDLE;
    VkBuffer outputBuffer = VK_NULL_HANDLE;
    VkDeviceMemory outputBufferMemory = VK_NULL_HANDLE;
    VkBuffer paramsBuffer = VK_NULL_HANDLE;
    VkDeviceMemory paramsBufferMemory = VK_NULL_HANDLE;
    VkShaderModule computeShaderModule = VK_NULL_HANDLE;
    VkPipeline computePipeline = VK_NULL_HANDLE;
    VkPipelineLayout pipelineLayout = VK_NULL_HANDLE;
    VkDescriptorSetLayout descriptorSetLayout = VK_NULL_HANDLE;
    VkDescriptorSet descriptorSet = VK_NULL_HANDLE;
    uint32_t computeQueueFamilyIndex = 0;
    std::string compFilePath; // path to the compiled compute shader blob

    void setupVulkan();                   // instance/device/queue creation
    void createComputeResources();        // pipeline, layouts, pools
    void createBuffers(VkDeviceSize inputSize, VkDeviceSize outputSize);
    void createDescriptorSet();
    uint32_t findComputeQueueFamily(VkPhysicalDevice device);
    uint32_t findMemoryType(uint32_t typeFilter, VkMemoryPropertyFlags properties);
    void processImageBatch(const std::vector<std::vector<float>>& images,
                           std::vector<float>& outputData,
                           int32_t width, int32_t height,
                           const std::vector<float>& exposures);
    VkShaderModule createShaderModule(const std::vector<uint8_t>& code);
    std::vector<uint8_t> readFile(const std::string& filename);
    void cleanup();

public:
    VulkanHdrGenerator(const std::string& compFile);
    ~VulkanHdrGenerator();

    // Generate HDR from multiple BMP files with tile-based processing
    bool generateHdr(const std::vector<std::string>& bmpFiles,
                     const std::string& outputFile,
                     const std::vector<float>& exposureValues,
                     int32_t tileWidth = 256, int32_t tileHeight = 256);
};
#endif //MPPREVIEW_VULKAN_HDR_GENERATOR_H

@ -0,0 +1,30 @@
package com.xypower.mppreview;
import android.os.AsyncTask;
import java.util.concurrent.Executor;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
/**
 * An {@link AsyncTask} that runs its background work on a small dedicated
 * thread pool rather than AsyncTask's shared default executor.
 *
 * NOTE(review): AsyncTask has been deprecated since API 30; consider
 * migrating to java.util.concurrent executors or coroutines.
 */
public class AsyncTaskWithCustomThreadPool extends AsyncTask<Void, Void, Void> {

    private static final int CORE_POOL_SIZE = 2;    // core worker threads
    private static final int MAXIMUM_POOL_SIZE = 2; // maximum worker threads
    private static final int KEEP_ALIVE = 3;        // idle keep-alive, in seconds

    /** Shared executor backing every instance of this task. */
    public static final Executor THREAD_POOL_EXECUTOR =
            new ThreadPoolExecutor(
                    CORE_POOL_SIZE,
                    MAXIMUM_POOL_SIZE,
                    KEEP_ALIVE,
                    TimeUnit.SECONDS,
                    new LinkedBlockingQueue<>());

    @Override
    protected Void doInBackground(Void... params) {
        // Background work goes here.
        return null;
    }

    /** Submits this task to the custom pool instead of the default executor. */
    public void executeTask() {
        executeOnExecutor(THREAD_POOL_EXECUTOR);
    }
}

@ -18,7 +18,6 @@ package com.xypower.mppreview;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.TextureView;
/**
@ -53,15 +52,11 @@ public class AutoFitTextureView extends TextureView {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
Log.i("AutoFit", "setAspectRatio Width=" + width + " Height=" + height);
if (mRatioWidth == width && mRatioHeight == height) {
return;
}
mRatioWidth = width;
mRatioHeight = height;
requestLayout();
}
@ -70,17 +65,12 @@ public class AutoFitTextureView extends TextureView {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == mRatioWidth || 0 == mRatioHeight) {
Log.i("AutoFit", "0 Width=" + width + " Height=" + height);
setMeasuredDimension(width, height);
} else {
if (width < height * mRatioWidth / mRatioHeight) {
Log.i("AutoFit", "Width=" + width + " Height=" + ((int)(width * mRatioHeight / mRatioWidth)));
setMeasuredDimension(width, width * mRatioHeight / mRatioWidth);
} else {
Log.i("AutoFit", "Width=" + ((int)height * mRatioWidth / mRatioHeight) + " Height=" + height);
setMeasuredDimension(height * mRatioWidth / mRatioHeight, height);
}
}

@ -0,0 +1,133 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.xypower.mppreview;
import java.io.OutputStream;
import java.nio.ByteBuffer;
/**
* A ByteBuffer-backed OutputStream that expands the internal ByteBuffer as required. Given this, the caller should
* always access the underlying ByteBuffer via the {@link #buffer()} method until all writes are completed.
*
* This class is typically used for 2 purposes:
*
* 1. Write to a ByteBuffer when there is a chance that we may need to expand it in order to fit all the desired data
* 2. Write to a ByteBuffer via methods that expect an OutputStream interface
*
* Hard to track bugs can happen when this class is used for the second reason and unexpected buffer expansion happens.
* So, it's best to assume that buffer expansion can always happen. An improvement would be to create a separate class
* that throws an error if buffer expansion is required to avoid the issue altogether.
*/
/**
 * A ByteBuffer-backed OutputStream that expands the internal ByteBuffer as required. Given this, the caller should
 * always access the underlying ByteBuffer via the {@link #buffer()} method until all writes are completed.
 *
 * This class is typically used for 2 purposes:
 *
 * 1. Write to a ByteBuffer when there is a chance that we may need to expand it in order to fit all the desired data
 * 2. Write to a ByteBuffer via methods that expect an OutputStream interface
 *
 * Hard to track bugs can happen when this class is used for the second reason and unexpected buffer expansion happens.
 * So, it's best to assume that buffer expansion can always happen. An improvement would be to create a separate class
 * that throws an error if buffer expansion is required to avoid the issue altogether.
 */
public class ByteBufferOutputStream extends OutputStream {

    /** Growth factor applied to the current limit when the buffer must expand. */
    private static final float REALLOCATION_FACTOR = 1.1f;

    private final int initialCapacity;
    private final int initialPosition;
    private ByteBuffer buffer;

    /**
     * Creates an instance of this class that will write to the received `buffer` up to its `limit`. If necessary to
     * satisfy `write` or `position` calls, larger buffers will be allocated so the {@link #buffer()} method may return
     * a different buffer than the received `buffer` parameter.
     *
     * Prefer one of the constructors that allocate the internal buffer for clearer semantics.
     */
    public ByteBufferOutputStream(ByteBuffer buffer) {
        this.buffer = buffer;
        this.initialPosition = buffer.position();
        this.initialCapacity = buffer.capacity();
    }

    public ByteBufferOutputStream(int initialCapacity) {
        this(initialCapacity, false);
    }

    public ByteBufferOutputStream(int initialCapacity, boolean directBuffer) {
        this(directBuffer ? ByteBuffer.allocateDirect(initialCapacity) : ByteBuffer.allocate(initialCapacity));
    }

    /** Writes a single byte, expanding the internal buffer if necessary. */
    @Override
    public void write(int b) {
        ensureRemaining(1);
        buffer.put((byte) b);
    }

    /** Writes {@code len} bytes of {@code bytes} starting at {@code off}. */
    @Override
    public void write(byte[] bytes, int off, int len) {
        ensureRemaining(len);
        buffer.put(bytes, off, len);
    }

    /** Copies all remaining bytes of {@code sourceBuffer} into this stream. */
    public void write(ByteBuffer sourceBuffer) {
        ensureRemaining(sourceBuffer.remaining());
        buffer.put(sourceBuffer);
    }

    /** Returns the current backing buffer; may differ from the one supplied at construction. */
    public ByteBuffer buffer() {
        return buffer;
    }

    public int position() {
        return buffer.position();
    }

    public int remaining() {
        return buffer.remaining();
    }

    public int limit() {
        return buffer.limit();
    }

    /** Moves the write position, expanding the buffer if the target is past the current limit. */
    public void position(int position) {
        ensureRemaining(position - buffer.position());
        buffer.position(position);
    }

    /**
     * The capacity of the first internal ByteBuffer used by this class. This is useful in cases where a pooled
     * ByteBuffer was passed via the constructor and it needs to be returned to the pool.
     */
    public int initialCapacity() {
        return initialCapacity;
    }

    /**
     * Ensure there is enough space to write some number of bytes, expanding the underlying buffer if necessary.
     * This can be used to avoid incremental expansions through calls to {@link #write(int)} when you know how
     * many total bytes are needed.
     *
     * @param remainingBytesRequired The number of bytes required
     */
    public void ensureRemaining(int remainingBytesRequired) {
        if (remainingBytesRequired > buffer.remaining())
            expandBuffer(remainingBytesRequired);
    }

    private void expandBuffer(int remainingRequired) {
        // Grow by ~10% of the current limit, or to exactly what is required,
        // whichever is larger.
        int expandSize = Math.max((int) (buffer.limit() * REALLOCATION_FACTOR), buffer.position() + remainingRequired);
        // NOTE(review): expansion always allocates a heap buffer, even when
        // the stream was constructed over a direct buffer -- confirm callers
        // do not rely on directness after expansion.
        ByteBuffer temp = ByteBuffer.allocate(expandSize);
        int limit = limit();
        buffer.flip();
        temp.put(buffer);
        buffer.limit(limit);
        // reset the old buffer's position so that the partial data in the new buffer cannot be mistakenly consumed
        // we should ideally only do this for the original buffer, but the additional complexity doesn't seem worth it
        buffer.position(initialPosition);
        buffer = temp;
    }
}

@ -0,0 +1,64 @@
package com.xypower.mppreview;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.hardware.camera2.CameraCharacteristics;
/**
 * Converts rectangles from preview (view) coordinate space into camera
 * (sensor active-array) coordinate space.
 */
public class CoordinateTransformer {

    private final Matrix mPreviewToCameraTransform;
    private RectF mDriverRectF;

    /**
     * Convert rectangles to / from camera coordinate and preview coordinate space.
     *
     * @param chr camera characteristics
     * @param previewRect the preview rectangle size and position; no longer
     *                    modified by this constructor (the original mutated it
     *                    as a side effect of Matrix.mapRect)
     * @throws IllegalArgumentException if previewRect has zero area
     */
    public CoordinateTransformer(CameraCharacteristics chr, RectF previewRect) {
        if (!hasNonZeroArea(previewRect)) {
            throw new IllegalArgumentException("previewRect");
        }
        Rect rect = chr.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
        Integer sensorOrientation = chr.get(CameraCharacteristics.SENSOR_ORIENTATION);
        // Fall back to 90 degrees when the orientation characteristic is missing.
        int rotation = sensorOrientation == null ? 90 : sensorOrientation;
        mDriverRectF = new RectF(rect);
        Integer face = chr.get(CameraCharacteristics.LENS_FACING);
        boolean mirrorX = face != null && face == CameraCharacteristics.LENS_FACING_FRONT;
        mPreviewToCameraTransform = previewToCameraTransform(mirrorX, rotation, previewRect);
    }

    /**
     * Transform a rectangle in preview view space into a new rectangle in
     * camera view space.
     *
     * @param source the rectangle in preview view space
     * @return the rectangle in camera view space.
     */
    public RectF toCameraSpace(RectF source) {
        RectF result = new RectF();
        mPreviewToCameraTransform.mapRect(result, source);
        return result;
    }

    private Matrix previewToCameraTransform(boolean mirrorX, int sensorOrientation,
                                            RectF previewRect) {
        Matrix transform = new Matrix();
        // Need mirror for front camera.
        transform.setScale(mirrorX ? -1 : 1, 1);
        // Because preview orientation is different from sensor orientation,
        // rotate to the same orientation, counterclockwise.
        transform.postRotate(-sensorOrientation);
        // Work on a copy: Matrix.mapRect mutates its argument in place, and
        // the original code thereby modified the caller's previewRect.
        RectF rotatedPreviewRect = new RectF(previewRect);
        transform.mapRect(rotatedPreviewRect);
        // Map preview coordinates to driver (active-array) coordinates.
        Matrix fill = new Matrix();
        fill.setRectToRect(rotatedPreviewRect, mDriverRectF, Matrix.ScaleToFit.FILL);
        // Concat the previous transform on top of the fill behavior.
        transform.setConcat(fill, transform);
        return transform;
    }

    private static boolean hasNonZeroArea(RectF rect) {
        return rect.width() != 0 && rect.height() != 0;
    }
}

@ -0,0 +1,124 @@
//package com.xypower.mppreview;
//
//import static org.opencv.photo.Photo.createCalibrateDebevec;
//
//import org.opencv.android.OpenCVLoader;
//import org.opencv.core.Core;
//import org.opencv.core.CvType;
//import org.opencv.core.Mat;
//import org.opencv.core.MatOfFloat;
//import org.opencv.core.Scalar;
//import org.opencv.imgcodecs.Imgcodecs;
//import org.opencv.photo.CalibrateDebevec;
//import org.opencv.photo.Photo;
//
//import java.util.ArrayList;
//import java.util.Arrays;
//import java.util.List;
//
//public class HdrMergeExample {
//
// static {
// if (!OpenCVLoader.initDebug()) {
// // Handle initialization error
// }
// }
//
//// public Mat mergeHdrFrames(List<Mat> frames) {
//// Mat hdrImage = new Mat();
//// MatOfFloat response = new MatOfFloat();
////
//// // 假设frames是一个包含多个图像矩阵的列表
//// // 这里使用第一个图像作为基础,然后逐个调整其它图像的曝光
//// Mat baseFrame = frames.get(0);
//// for (int i = 1; i < frames.size(); i++) {
//// Mat frame = frames.get(i);
//// ExposureCompensator.create(ExposureCompensator.GAIN).apply(i, baseFrame.clone(), frame, response);
//// baseFrame.convertTo(baseFrame, -1, (float) Math.exp(-response.get(0, 0)), 0);
//// frame.convertTo(frame, -1, 1, 0);
//// Core.add(baseFrame, frame, baseFrame);
//// }
////
//// // 对结果进行归一化处理显示时转换为8位
//// Core.normalize(baseFrame, hdrImage, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1);
////
//// // 清理资源
//// response.release();
//// for (Mat frame : frames) {
//// frame.release();
//// }
////
//// return hdrImage;
//// }
// public Mat mergeHdrFrames(List<Mat> hdrFrames, float gamma) {
// // 假设所有帧尺寸相同
// Mat hdrImage = new Mat();
// Photo.createHdrMergeMertens(hdrFrames, hdrImage);
// Photo.createMergeDebevec(hdrFrames, hdrImage);
//
// // 应用伽马校正(可选)
// Core.pow(hdrImage, new Scalar(1d / gamma), hdrImage);
//
// // 归一化到[0, 255]
// Core.normalize(hdrImage, hdrImage, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1);
//
// return hdrImage;
// }
// // 示例使用方法
// public void runHdrMerge(String[] imagePaths) {
// List<Mat> frames = new ArrayList<>();
// for (String imagePath : imagePaths) {
// Mat frame = Imgcodecs.imread(imagePath, Imgcodecs.IMREAD_COLOR);
// if (frame.empty()) {
// // 图像加载失败处理
// }
// frames.add(frame);
// }
//
// Mat hdrImage = mergeHdrFrames(frames);
//
// // 保存或显示HDR图像
// Imgcodecs.imwrite("/path/to/save/hdr_image.jpg", hdrImage);
//
// // 清理资源
// hdrImage.release();
// }
//
//
// public Mat mergeRawImages(Mat[] images) {
// Mat mergedImage = new Mat();
// Core.merge(Arrays.asList(images), mergedImage);
// return mergedImage;
// }
//
// public Mat[] splitRawImages(Mat images) {
//
// // Obtain Camera Response Function (CRF) 提取相机响应函数
//// cout << "Calculating Camera Response Function (CRF) ... " << endl;
// Mat responseDebevec;
// CalibrateDebevec calibrateDebevec1 = createCalibrateDebevec();
// calibrateDebevec1.process(images, responseDebevec, times);
//
//
// Mat[] channels = new Mat[3];
// Core.split(image, Arrays.asList(channels));
// return channels;
// }
//
// public Mat applyHDR(Mat[] images) {
// Mat hdrImage = new Mat();
// // 假设所有图片都已经预先调整到相同的曝光度
// // 这里可以根据实际情况调整曝光度
// for (Mat image : images) {
// // 这里的1表示将图片增加相同的曝光度0表示减少
// // 根据需要调整增加的曝光度的强度
// ExposureCompensator.apply(1, image, hdrImage);
// }
// return hdrImage;
// }
//
// public void saveMergedImage(Mat mergedImage, String path) {
// Imgcodecs.imwrite(path, mergedImage);
// }
//
//}

@ -0,0 +1,243 @@
package com.xypower.mppreview;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageDecoder;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.DngCreator;
import android.media.Image;
import android.media.ImageReader;
import android.util.Log;
import androidx.annotation.NonNull;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
public class ImageSaver implements Runnable {
private final static String TAG = "HDR";
private final Image mImage;
private final File mFile;
private final CaptureResult mCaptureResult;
private final CameraCharacteristics mCharacteristics;
private CompleteCallback mCallback;
private final ImagePair mImagePair;
private final Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
public static class ImageInfo {
public long exposureTime;
public int length;
public Bitmap bitmap;
ImageInfo(Bitmap bmp, int length, long exp) {
this.exposureTime = exp;
this.length = length;
bitmap = bmp;
}
}
public static class ImagePair {
public List<ImageInfo> mImages;
public int mExpectedCount;
public Runnable mRunnable;
public ImagePair(int expectedCount) {
mImages = new ArrayList<>();
mExpectedCount = expectedCount;
mRunnable = null;
}
public void setRunnable(Runnable runnable) {
mRunnable = runnable;
}
public void addImage(Bitmap bitmap, int length, long exp) {
boolean isFull = false;
ImageInfo imageInfo = new ImageInfo(bitmap, length, exp);
synchronized (mImages) {
mImages.add(imageInfo);
isFull = (mImages.size() == mExpectedCount);
}
if (mRunnable != null && isFull) {
mRunnable.run();
}
}
public List<ImageInfo> getImages() {
return mImages;
}
}
public static abstract class ImagePairRunnable implements Runnable {
protected ImagePair mImagePair;
public ImagePairRunnable(ImagePair imagePair) {
mImagePair = imagePair;
}
}
private ArrayList<PngPhotoBean> mlist = new ArrayList<>();//用来存储已拍照的照片名称
public ImageSaver(Image image, File file, CaptureResult result, CameraCharacteristics characteristics,
Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader, ArrayList<PngPhotoBean> list,
CompleteCallback callback, ImagePair imagePair) {
mImage = image;
mFile = file;
mCaptureResult = result;
mCharacteristics = characteristics;
mReader = reader;
mlist = list;
mCallback = callback;
mImagePair = imagePair;
}
/**
 * Persists the captured frame according to its format.
 *
 * JPEG frames are written to {@code mFile} verbatim. RAW frames are either written as DNG to
 * {@code mFile}, or (when {@code mImagePair} is set) serialized to an in-memory DNG buffer,
 * hardware-decoded to a Bitmap, and handed to the pair for HDR merging. Unknown formats are ignored.
 * The image and the ref-counted reader are always released.
 */
@Override
public void run() {
    boolean success = false;
    int format = mImage.getFormat();
    try {
        switch (format) {
            case ImageFormat.JPEG: {
                // JPEG arrives fully encoded in plane 0; just dump the bytes to disk.
                ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
                byte[] bytes = new byte[buffer.remaining()];
                buffer.get(bytes);
                FileOutputStream output = null;
                try {
                    output = new FileOutputStream(mFile);
                    output.write(bytes);
                    success = true;
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    mImage.close();
                    closeOutput(output);
                }
                break;
            }
            case ImageFormat.RAW_SENSOR: {
                DngCreator dngCreator = new DngCreator(mCharacteristics, mCaptureResult);
                if (mImagePair != null) {
                    ByteBuffer byteBuffer = null;
                    ByteBufferOutputStream baos = null;
                    Long t = mCaptureResult.get(CaptureResult.SENSOR_EXPOSURE_TIME);
                    // FIX: the exposure-time key may be absent; the old code NPE'd on t.toString().
                    final long exposureNs = (t != null) ? t.longValue() : 0L;
                    try {
                        // 16 bits/pixel raw data plus headroom for DNG headers/metadata.
                        byteBuffer = ByteBuffer.allocateDirect(mImage.getWidth() * mImage.getHeight() * 2 + 81768);
                        baos = new ByteBufferOutputStream(byteBuffer);
                        Log.d(TAG, "Before Saving DNG Exp=" + exposureNs);
                        dngCreator.writeImage(baos, mImage);
                        // flip() sets limit=position and rewinds; the previous extra limit() call was redundant.
                        byteBuffer.flip();
                        Log.d(TAG, "After Saving DNG Exp=" + exposureNs + " size=" + byteBuffer.limit());
                        ImageDecoder.OnHeaderDecodedListener listener = new ImageDecoder.OnHeaderDecodedListener() {
                            @Override
                            public void onHeaderDecoded(@NonNull ImageDecoder decoder, @NonNull ImageDecoder.ImageInfo info, @NonNull ImageDecoder.Source source) {
                                // Decode straight into a hardware bitmap to reduce CPU-side memory.
                                decoder.setAllocator(ImageDecoder.ALLOCATOR_HARDWARE);
                            }
                        };
                        Log.i(TAG, "Start Hardware Decoding Exp=" + exposureNs + " TID=" + Thread.currentThread().getId());
                        Bitmap bmp = null;
                        ImageDecoder.Source source = ImageDecoder.createSource(byteBuffer);
                        try {
                            bmp = ImageDecoder.decodeBitmap(source, listener);
                        } catch (Exception ex) {
                            ex.printStackTrace();
                        }
                        Log.i(TAG, "End Hardware Decoding Exp=" + exposureNs);
                        byteBuffer.clear();
                        byteBuffer = null;
                        // bmp may be null when decoding failed; the pair is expected to cope -- TODO confirm.
                        mImagePair.addImage(bmp, 0, exposureNs);
                        success = true;
                    } catch (Exception e) {
                        e.printStackTrace();
                    } finally {
                        dngCreator.close();
                        mImage.close();
                        closeOutput(baos);
                    }
                } else {
                    // Plain DNG-to-file path.
                    FileOutputStream output = null;
                    try {
                        output = new FileOutputStream(mFile);
                        dngCreator.writeImage(output, mImage);
                        success = true;
                    } catch (IOException e) {
                        e.printStackTrace();
                    } finally {
                        mImage.close();
                        dngCreator.close();
                        closeOutput(output);
                    }
                }
                break;
            }
            default: {
                // Unsupported format: nothing to persist.
                break;
            }
        }
    } finally {
        // FIX: always release the ref-counted reader, even if an unexpected
        // RuntimeException escapes one of the branches above.
        mReader.close();
    }
}
/**
 * Closes the given stream, logging (but otherwise ignoring) any IOException.
 *
 * @param outputStream stream to close; may be null, in which case this is a no-op
 */
private static void closeOutput(OutputStream outputStream) {
    if (outputStream == null) {
        return;
    }
    try {
        outputStream.close();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
// private static void clear() {
// if (null != outputStream) {
// try {
// outputStream.close();
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
// }
// private void saveJpeg(Image image,String name) {
// Image.Plane[] planes = image.getPlanes();
// ByteBuffer buffer = planes[0].getBuffer();
// int pixelStride = planes[0].getPixelStride();
// int rowStride = planes[0].getRowStride();
// int rowPadding = rowStride - pixelStride * mWidth;
//
// Bitmap bitmap = Bitmap.createBitmap(mWidth + rowPadding / pixelStride, mHeight, Bitmap.Config.ARGB_8888);
// bitmap.copyPixelsFromBuffer(buffer);
// //bitmap.compress(Bitmap.CompressFormat.JPEG, 100, fos);
// ImageSaveUtil.saveBitmap2file(bitmap,getApplicationContext(),name);
//
// }
}

@ -0,0 +1,98 @@
package com.xypower.mppreview;
import android.content.Context;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureResult;
import android.media.Image;
import android.media.ImageReader;
import com.xypower.mppreview.bean.PngPhotoBean;
import com.xypower.mppreview.interfaces.CompleteCallback;
import java.io.File;
import java.util.ArrayList;
/**
 * Mutable builder that collects everything an {@link ImageSaver} needs.
 *
 * Setters are synchronized because the image, capture result and reader arrive on
 * different camera callbacks; {@link #buildIfComplete()} returns a saver only once
 * every mandatory part is present.
 */
public class ImageSaverBuilder {
    public Image mImage;
    public File mFile;
    public CaptureResult mCaptureResult;
    public CameraCharacteristics mCharacteristics;
    public Context mContext;
    public Camera2RawFragment.RefCountedAutoCloseable<ImageReader> mReader;
    public ImageSaver.ImagePair mImagePair;
    private ArrayList<PngPhotoBean> mlist;
    private CompleteCallback mCallback;

    /**
     * Construct a new ImageSaverBuilder using the given {@link Context}.
     *
     * @param context a {@link Context} for accessing the
     *                {@link android.provider.MediaStore}.
     */
    public ImageSaverBuilder(final Context context) {
        mContext = context;
    }

    /** Sets the ref-counted reader that owns the image; mandatory. */
    public synchronized ImageSaverBuilder setRefCountedReader(Camera2RawFragment.RefCountedAutoCloseable<ImageReader> reader) {
        if (reader == null) throw new NullPointerException();
        mReader = reader;
        return this;
    }

    /** Sets the captured image; mandatory. */
    public synchronized ImageSaverBuilder setImage(final Image image) {
        if (image == null) throw new NullPointerException();
        mImage = image;
        return this;
    }

    /** Sets the optional in-memory HDR pair target. */
    public synchronized ImageSaverBuilder setImagePair(final ImageSaver.ImagePair imagePair) {
        if (imagePair == null) throw new NullPointerException();
        mImagePair = imagePair;
        return this;
    }

    /** Sets the destination file; mandatory. */
    public synchronized ImageSaverBuilder setFile(final File file) {
        if (file == null) throw new NullPointerException();
        mFile = file;
        return this;
    }

    /** Sets the capture metadata matching the image; mandatory. */
    public synchronized ImageSaverBuilder setResult(final CaptureResult result) {
        if (result == null) throw new NullPointerException();
        mCaptureResult = result;
        return this;
    }

    /** Sets the shared list of saved-photo records. */
    public synchronized ImageSaverBuilder setList(ArrayList<PngPhotoBean> list) {
        if (list == null) throw new NullPointerException();
        mlist = list;
        return this;
    }

    /** Sets the characteristics of the camera that produced the frame; mandatory. */
    public synchronized ImageSaverBuilder setCharacteristics(final CameraCharacteristics characteristics) {
        if (characteristics == null) throw new NullPointerException();
        mCharacteristics = characteristics;
        return this;
    }

    /** Sets the optional completion callback (null is allowed). */
    public synchronized ImageSaverBuilder setCallback(CompleteCallback callback) {
        mCallback = callback;
        return this;
    }

    /**
     * @return a new {@link ImageSaver} once all mandatory parts are set, otherwise null.
     */
    public synchronized ImageSaver buildIfComplete() {
        if (!isComplete()) {
            return null;
        }
        return new ImageSaver(mImage, mFile, mCaptureResult, mCharacteristics, mReader, mlist, mCallback, mImagePair);
    }

    /** @return the destination path for display, or "Unknown" if none is set yet. */
    public synchronized String getSaveLocation() {
        return (mFile == null) ? "Unknown" : mFile.toString();
    }

    // FIX: also require mReader -- ImageSaver.run() unconditionally closes it, so building
    // without a reader previously produced a saver guaranteed to throw NullPointerException.
    private boolean isComplete() {
        return mImage != null && mFile != null && mCaptureResult != null && mCharacteristics != null && mReader != null;
    }
}

@ -0,0 +1,310 @@
package com.xypower.mppreview;
import static java.lang.System.loadLibrary;
import androidx.activity.result.ActivityResult;
import androidx.activity.result.ActivityResultCallback;
import androidx.activity.result.ActivityResultLauncher;
import androidx.activity.result.contract.ActivityResultContracts;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import androidx.recyclerview.widget.GridLayoutManager;
import android.Manifest;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraManager;
import android.net.ConnectivityManager;
import android.net.Network;
import android.net.NetworkCapabilities;
import android.net.NetworkRequest;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.Toast;
import com.xypower.mppreview.adapter.ItemAdapter;
import com.xypower.mppreview.interfaces.OnItemClickListener;
import com.xypower.mppreview.ui.CameraActivity;
import com.xypower.mppreview.ui.CameraChannelActivity;
import com.xypower.mppreview.utils.HdrUtil;
import com.xypower.mppreview.utils.PhotoUtil;
import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.utils.CameraUtils;
import java.io.File;
import java.util.ArrayList;
/**
 * Launcher screen of the app.
 *
 * Visible responsibilities:
 *  - requests camera/storage/phone-state runtime permissions on startup,
 *  - offers two capture entry points: HDR capture (CameraActivity) and the system camera,
 *  - shows one clickable "channel" tile per detected camera (opens CameraChannelActivity),
 *  - wires a debug button (btnTest) that runs a native HDR merge over hard-coded paths under
 *    /sdcard/com.xypower.mpapp/tmp and auto-clicks it one second after startup --
 *    NOTE(review): looks debug-only; confirm before shipping.
 */
public class MainActivity extends AppCompatActivity implements View.OnClickListener, AdapterView.OnItemSelectedListener, OnItemClickListener {
    // Native library providing Camera2RawFragment.makeHdr5 (HDR merge).
    static {
        loadLibrary("mppreview");
    }
    // static {
    //     if (!OpenCVLoader.initDebug()) {
    //         Log.e("OpenCV", "Initialization failed");
    //     } else {
    //         Log.d("OpenCV", "Initialization succeeded");
    //     }
    // }
    // Request code used for the runtime-permission prompt in onCreate().
    private static int MY_PERMISSIONS_REQUEST_FOREGROUND_SERVICE = 100;
    // Shared exposure-compensation value -- NOTE(review): mutable global state; verify its writers/readers.
    public static int ExposureComp = 0;
    // NOTE(review): the two Button fields below are never assigned here; views are reached via viewBinding.
    private Button systakepic;
    private Button hdrtakepic;
    // Result channel for the system-camera intent launched from PhotoUtil.openCamera().
    private ActivityResultLauncher<Intent> photoResultLauncher;
    // Spinner selection, forwarded to CameraActivity as the HDRNUM extra.
    private int picsize = 0;
    private com.xypower.mppreview.databinding.ActivityMainBinding viewBinding;
    // Camera count reported by CameraX (used to size the channel grid).
    private int numberOfCameras;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        viewBinding = com.xypower.mppreview.databinding.ActivityMainBinding.inflate(getLayoutInflater());
        setContentView(viewBinding.getRoot());
        initView();
        initActivityResult();
        // Permissions needed for capture, storage and the foreground service.
        String[] accessPermissions = new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE, Manifest.permission.FOREGROUND_SERVICE, Manifest.permission.READ_PHONE_STATE,
                /*Manifest.permission.PACKAGE_USAGE_STATS,*/
                /*Manifest.permission.SET_TIME,*/};
        boolean needRequire = false;
        for (String access : accessPermissions) {
            int curPermission = ActivityCompat.checkSelfPermission(MainActivity.this, access);
            if (curPermission != PackageManager.PERMISSION_GRANTED) {
                needRequire = true;
                break;
            }
        }
        if (needRequire) {
            // Fire-and-forget: execution continues without waiting for the user's answer.
            ActivityCompat.requestPermissions(MainActivity.this, accessPermissions, MY_PERMISSIONS_REQUEST_FOREGROUND_SERVICE);
            // return;
        }
        // Ensure the app's shared working directory exists.
        // NOTE(review): hard-coded /sdcard path; needs legacy external-storage access on API 29+ -- confirm.
        File file = this.getFilesDir();
        String path = "/sdcard/com.xypower.mppreview/";
        file = new File(path);
        if (!file.exists()) {
            file.mkdirs();
        }
    }

    // Wires all click/selection listeners and builds the per-camera channel grid.
    private void initView() {
        viewBinding.hdrtakepic.setOnClickListener(this);
        viewBinding.systakepic.setOnClickListener(this);
        viewBinding.spinner.setOnItemSelectedListener(this);
        // NOTE(review): CameraUtils.getNumberOfCameras blocks on the CameraX provider future --
        // called here on the main thread; confirm this is acceptable at startup.
        numberOfCameras = CameraUtils.getNumberOfCameras(this);
        ItemAdapter itemAdapter = new ItemAdapter(numberOfCameras);
        itemAdapter.setOnClickListener(this);
        viewBinding.recyclerView.setAdapter(itemAdapter);
        viewBinding.recyclerView.setLayoutManager(new GridLayoutManager(this,3));
        // Debug: runs the native HDR merge on fixed input files in a background thread.
        viewBinding.btnTest.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final File outputFile = new File(getFilesDir(), "out.bmp");
                final File shadersPath = new File(getFilesDir(), "shaders");
                if (!shadersPath.exists()) {
                    shadersPath.mkdirs();
                }
                if (outputFile.exists()) {
                    outputFile.delete();
                }
                // Copy the compute shaders and two sample inputs out of the APK assets.
                HdrUtil.copyAssetsDir(getApplicationContext(), "shaders", shadersPath.getAbsolutePath());
                final File f1 = new File(getFilesDir(), "5.bmp");
                final File f2 = new File(getFilesDir(), "6.bmp");
                if (f1.exists()) {
                    f1.delete();
                }
                if (f2.exists()) {
                    f2.delete();
                }
                HdrUtil.copyAssetsFile(getApplicationContext(), "5.bmp", f1.getAbsolutePath());
                HdrUtil.copyAssetsFile(getApplicationContext(), "6.bmp", f2.getAbsolutePath());
                f1.setReadable(true);
                // Merge on a worker thread; makeHdr5 is a native call and may be slow.
                Thread th = new Thread(new Runnable() {
                    @Override
                    public void run() {
                        // Hard-coded branch switch between two test input sets.
                        Boolean bigger = true;
                        if (bigger) {
                            File out = new File("/sdcard/com.xypower.mpapp/tmp/out.bmp");
                            Camera2RawFragment.makeHdr5(shadersPath.getAbsolutePath() + "/hdr_merge.comp.spv", 1000000, "/sdcard/com.xypower.mpapp/tmp/0.bmp", 4000000,
                                    "/sdcard/com.xypower.mpapp/tmp/4.bmp",
                                    out.getAbsolutePath());
                        } else {
                            File f11 = new File("/sdcard/com.xypower.mpapp/tmp/IMG_20250323_104953_582.bmp");
                            File f12 = new File("/sdcard/com.xypower.mpapp/tmp/IMG_20250323_104954_960.bmp");
                            // NOTE(review): the two empty existence checks below are debugger
                            // breakpoint anchors; they have no runtime effect.
                            if (!f11.exists()) {
                                int aa = 0;
                            }
                            if (!f12.exists()) {
                                int aa = 0;
                            }
                            Camera2RawFragment.makeHdr5(shadersPath .getAbsolutePath() + "/hdr_merge.comp.spv",
                                    200000000, f11.getAbsolutePath(),
                                    200000000, f12.getAbsolutePath(),
                                    "/sdcard/com.xypower.mpapp/tmp/291out.bmp");
                        }
                    }
                });
                th.start();
            }
        });
        // initNetWork();
        // Auto-trigger the HDR self-test one second after the UI is up.
        Handler handler = new Handler();
        handler.postDelayed(new Runnable() {
            @Override
            public void run() {
                viewBinding.btnTest.performClick();
            }
        }, 1000);
    }

    // Watches for Ethernet connectivity and reflects the state in the UI (currently unused; see initView).
    private void initNetWork() {
        ConnectivityManager connectivityManager = (ConnectivityManager) getSystemService(Context.CONNECTIVITY_SERVICE);
        ConnectivityManager.NetworkCallback networkCallback = new ConnectivityManager.NetworkCallback() {
            @Override
            public void onAvailable(Network network) {
                super.onAvailable(network);
                NetworkCapabilities capabilities = connectivityManager.getNetworkCapabilities(network);
                if (capabilities != null && capabilities.hasTransport(NetworkCapabilities.TRANSPORT_ETHERNET)) {
                    Log.d("NetworkCallback", "Ethernet connected");
                    // Ethernet is connected.
                    viewBinding.hdrhint.setText("已连接");
                    // RouteManager.addRoute("192.168.68.0/24", "eth0");
                }
            }
            @Override
            public void onLost(Network network) {
                super.onLost(network);
                Log.d("NetworkCallback", "Network lost");
                // Network disconnected.
                viewBinding.hdrhint.setText("已断开");
            }
        };
        NetworkRequest request = new NetworkRequest.Builder()
                .addTransportType(NetworkCapabilities.TRANSPORT_ETHERNET)
                .build();
        connectivityManager.registerNetworkCallback(request, networkCallback);
    }

    // Registers the (currently no-op) receiver for the system-camera result.
    private void initActivityResult() {
        photoResultLauncher = registerForActivityResult(new ActivityResultContracts.StartActivityForResult(), new ActivityResultCallback<ActivityResult>() {
            @Override
            public void onActivityResult(ActivityResult result) {
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();
        // if (OpenCVLoader.initDebug()) {
        //     Log.d("dfsdfd", "成功");
        // } else {
        //     Log.d("dfsdfd", "失败");
        // }
    }

    // Replaces the container with a fresh Camera2RawFragment instance.
    private void restartFragment() {
        getSupportFragmentManager().beginTransaction().replace(R.id.container, Camera2RawFragment.newInstance()).commit();
    }

    // Delegates a focus request to the capture fragment.
    // NOTE(review): findFragmentById may return null if the fragment is not attached -- would NPE.
    private void doFocus() {
        Camera2RawFragment fragment = (Camera2RawFragment) getSupportFragmentManager().findFragmentById(R.id.container);
        fragment.doFocus();
    }

    // Delegates a capture request to the capture fragment (same null caveat as doFocus).
    private void takePicture() {
        Camera2RawFragment fragment = (Camera2RawFragment) getSupportFragmentManager().findFragmentById(R.id.container);
        fragment.takePicture();
    }

    @Override
    public void onClick(View view) {
        int id = view.getId();
        switch (id) {
            case R.id.hdrtakepic:
                // Open the HDR capture screen, passing the selected frame count.
                Intent intent = new Intent(this, CameraActivity.class);
                intent.putExtra(Contants.HDRNUM, picsize);
                startActivity(intent);
                break;
            case R.id.systakepic:
                // Delegate to the system camera app.
                PhotoUtil.openCamera(this, photoResultLauncher);
                break;
            // case R.id.channel1:
            //     openChannelActivity(0);
            //     break;
            // case R.id.channel2:
            //     openChannelActivity(1);
            //     break;
            // case R.id.channel3:
            //     openChannelActivity(2);
            //     break;
            // case R.id.channel4:
            //     openChannelActivity(3);
            //     break;
            // case R.id.channel5:
            //     openChannelActivity(4);
            //     break;
            // case R.id.channel6:
            //     openChannelActivity(5);
            //     break;
        }
    }

    /**
     * Counts cameras via the Camera2 API.
     * NOTE(review): duplicates CameraUtils.getNumberOfCameras (which uses CameraX) -- the two can
     * disagree; consider consolidating.
     */
    public static int getNumberOfCameras(Context context) {
        CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
        try {
            String[] cameraIds = manager.getCameraIdList();
            return cameraIds.length;
        } catch (CameraAccessException e) {
            e.printStackTrace();
            return 0; // report zero cameras if the camera service throws
        }
    }

    // Opens the per-channel CameraX preview screen for the given camera index.
    public void openChannelActivity(int channel) {
        Intent intent = new Intent(this, CameraChannelActivity.class);
        intent.putExtra(Contants.CAMERAID, channel);
        startActivity(intent);
    }

    @Override
    public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
        // Spinner position doubles as the HDR frame-count choice.
        picsize = position;
    }

    @Override
    public void onNothingSelected(AdapterView<?> parent) {
    }

    @Override
    public void onItemClick(View v, int position) {
        // A channel tile was tapped.
        openChannelActivity(position);
    }
}

@ -0,0 +1,21 @@
package com.xypower.mppreview;
import android.annotation.SuppressLint;
import android.util.Log;
import androidx.annotation.NonNull;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageProxy;
import com.xypower.mppreview.bean.Contants;
import java.util.Objects;
/**
 * Minimal CameraX analyzer: logs each frame's timestamp and immediately releases the frame
 * so the pipeline keeps delivering new images.
 */
public class MyAnalyzer implements ImageAnalysis.Analyzer {
    @SuppressLint("UnsafeOptInUsageError")
    @Override
    public void analyze(@NonNull ImageProxy image) {
        // getImage() is null-checked up front; throws NPE (like the original) if the proxy is empty.
        final long stamp = Objects.requireNonNull(image.getImage()).getTimestamp();
        Log.d(Contants.TAG, "Image's stamp is " + stamp);
        // Closing the proxy is mandatory -- analysis stalls otherwise.
        image.close();
    }
}

@ -0,0 +1,103 @@
package com.xypower.mppreview;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageDecoder;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
 * Helpers for converting RAW/DNG captures into compressed image files.
 */
public class RawToJpgConverter {

    /** Closes a stream, logging (but otherwise ignoring) errors. Null is a no-op. */
    public static void closeFriendly(Closeable closeable) {
        if (null != closeable) {
            try {
                closeable.close();
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Recursively collects the names of all regular files under {@code directory}.
     *
     * FIX: the previous version recursed into subdirectories but discarded their results;
     * they are now merged into the returned list.
     *
     * @param directory root to scan
     * @return file names (not full paths); empty if the directory cannot be read
     */
    public static List<String> listFiles(File directory) {
        List<String> list = new ArrayList<>();
        File[] files = directory.listFiles();
        if (files != null) {
            for (File file : files) {
                if (file.isDirectory()) {
                    list.addAll(listFiles(file));
                } else {
                    list.add(file.getName());
                }
            }
        }
        return list;
    }

    /**
     * Decodes a DNG file and writes it out as PNG. Errors are logged and swallowed.
     */
    public static void convertDngToPng(String dngFile, String pngFile) {
        ImageDecoder.Source src = ImageDecoder.createSource(new File(dngFile));
        Bitmap bmp = null;
        FileOutputStream output = null;
        try {
            bmp = ImageDecoder.decodeBitmap(src);
            output = new FileOutputStream(new File(pngFile));
            bmp.compress(Bitmap.CompressFormat.PNG, 100, output);
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            closeFriendly(output);
            if (bmp != null) {
                bmp.recycle();
            }
        }
    }

    /**
     * Decodes the image bytes at {@code path} and re-encodes them to {@code outpath}.
     *
     * NOTE(review): despite the method name, the data is compressed with
     * {@link Bitmap.CompressFormat#PNG} -- kept as-is to preserve behavior; confirm intent.
     *
     * @return a Bitmap decoded from the re-encoded bytes
     * @throws IOException on any read/write failure
     */
    public static Bitmap convertRawToJpg(String path, String outpath) throws IOException {
        byte[] rawData;
        // FIX: try-with-resources so the input stream is closed even when reading fails.
        try (InputStream inputStream = new FileInputStream(path)) {
            rawData = inputStreamToByteArray(inputStream);
        }
        Bitmap rawBitmap = BitmapFactory.decodeByteArray(rawData, 0, rawData.length);
        ByteArrayOutputStream bytestream = new ByteArrayOutputStream();
        rawBitmap.compress(Bitmap.CompressFormat.PNG, 100, bytestream);
        byte[] jpgData = bytestream.toByteArray();
        // FIX: close the output file even when write() throws.
        try (FileOutputStream outputStream = new FileOutputStream(outpath)) {
            outputStream.write(jpgData);
        }
        return BitmapFactory.decodeByteArray(jpgData, 0, jpgData.length);
    }

    /** Reads a stream fully into a byte array; closes the stream when done. */
    private static byte[] inputStreamToByteArray(InputStream inputStream) throws IOException {
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        byte[] buffer = new byte[1024];
        int len;
        while ((len = inputStream.read(buffer)) != -1) {
            byteArrayOutputStream.write(buffer, 0, len);
        }
        inputStream.close();
        return byteArrayOutputStream.toByteArray();
    }
}

@ -0,0 +1,66 @@
package com.xypower.mppreview.adapter;
import android.annotation.SuppressLint;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import androidx.annotation.NonNull;
import androidx.recyclerview.widget.RecyclerView;
import com.xypower.mppreview.R;
import com.xypower.mppreview.interfaces.OnItemClickListener;
/**
 * RecyclerView adapter showing one "channel" tile per detected camera.
 * Clicks are forwarded to an {@link OnItemClickListener} with the adapter position.
 */
public class ItemAdapter extends RecyclerView.Adapter<ItemAdapter.MyViewHolder> {

    private Integer count;               // number of channel tiles to display
    public OnItemClickListener listener; // forwarded into each view holder at creation

    /** Holds one channel label and forwards clicks with the holder's adapter position. */
    public class MyViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
        private final TextView title;
        private OnItemClickListener listeners;

        public MyViewHolder(View view, OnItemClickListener listener) {
            super(view);
            this.listeners = listener;
            title = view.findViewById(R.id.channel);
            title.setOnClickListener(this);
        }

        public TextView getTitle() {
            return title;
        }

        @Override
        public void onClick(View v) {
            int position = getAdapterPosition();
            // FIX: ignore clicks delivered while the holder is detached (NO_POSITION);
            // the previous code forwarded -1 to the listener in that case.
            if (listeners != null && position != RecyclerView.NO_POSITION) {
                listeners.onItemClick(v, position);
            }
        }
    }

    /**
     * @param itemcount number of channels to display; null is treated as zero
     */
    public ItemAdapter(Integer itemcount) {
        this.count = itemcount;
    }

    public void setOnClickListener(OnItemClickListener listener) {
        this.listener = listener;
    }

    @NonNull
    @Override
    public MyViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.item_view, parent, false);
        return new MyViewHolder(view, listener);
    }

    @Override
    public void onBindViewHolder(@NonNull MyViewHolder holder, @SuppressLint("RecyclerView") int position) {
        // Channel labels are 1-based.
        holder.getTitle().setText("通道" + (position + 1));
    }

    @Override
    public int getItemCount() {
        // FIX: guard against a null Integer, which previously caused an unboxing NPE.
        return count == null ? 0 : count;
    }
}

@ -0,0 +1,8 @@
package com.xypower.mppreview.bean;
/**
 * App-wide constant keys and tags.
 */
public class Contants {
    // Log tag. NOTE(review): both the class name ("Contants") and the tag value ("MpPriview")
    // appear misspelled; the tag is a runtime-visible string, so it is intentionally left unchanged.
    public static final String TAG = "MpPriview";
    // Intent extra: number of HDR frames selected on the main screen.
    public static final String HDRNUM = "hdrnum";
    // Intent extra: camera/channel index to open.
    public static final String CAMERAID = "CAMERAID";
    // Timestamp pattern used to build unique photo file names.
    public static final String FILENAME_FORMAT = "yyyy-MM-dd-HH-mm-ss-SSS";
}

@ -0,0 +1,23 @@
package com.xypower.mppreview.bean;
/**
 * Record of one saved PNG photo: its exposure time and file path.
 */
public class PngPhotoBean {
    // Exposure time -- presumably nanoseconds, matching CaptureResult.SENSOR_EXPOSURE_TIME; TODO confirm.
    private long etime;
    // Path of the saved file.
    private String path;

    public long getEtime() {
        return etime;
    }

    public void setEtime(long etime) {
        this.etime = etime;
    }

    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }
}

@ -0,0 +1,5 @@
package com.xypower.mppreview.interfaces;
/**
 * Notification that an image-saving task has finished. Carries no result payload.
 */
@FunctionalInterface
public interface CompleteCallback {
    /** Invoked once the operation completes. */
    void onResult();
}

@ -0,0 +1,8 @@
package com.xypower.mppreview.interfaces;
import android.view.View;
/**
 * Click listener for RecyclerView items, reporting the clicked view and its adapter position.
 */
@FunctionalInterface
public interface OnItemClickListener {
    /**
     * @param v        the clicked item view
     * @param position the item's adapter position (0-based)
     */
    void onItemClick(View v, int position);
}

@ -0,0 +1,49 @@
package com.xypower.mppreview.ui;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
import com.xypower.mppreview.Camera2RawFragment;
import com.xypower.mppreview.R;
import com.xypower.mppreview.bean.Contants;
/**
 * Full-screen host for {@link Camera2RawFragment}; forwards the requested HDR frame count
 * from the launching intent into the fragment's arguments.
 */
public class CameraActivity extends AppCompatActivity {

    private final static String TAG = "HDR";
    // HDR frame count read from the Contants.HDRNUM intent extra (0 if absent).
    public int intExtra;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        setContentView(R.layout.activity_camera);
        Intent intent = getIntent();
        intExtra = intent.getIntExtra(Contants.HDRNUM, 0);
        // Only create the fragment on first launch; on recreation the framework restores it.
        if (null == savedInstanceState) {
            Bundle bundle = new Bundle();
            bundle.putInt(Contants.HDRNUM, intExtra);
            getSupportFragmentManager().beginTransaction().setReorderingAllowed(true).replace(R.id.container, Camera2RawFragment.class, bundle).commit();
        }
    }

    @Override
    public void onTrimMemory(int level) {
        // FIX: propagate to the framework implementation as the Activity contract requires.
        super.onTrimMemory(level);
        Log.w(TAG, "Event onTrimMemory level=" + level);
        // Best-effort hint only; System.gc() does not throw, so the old try/catch was dead weight.
        System.gc();
    }

    @Override
    public void onLowMemory() {
        // FIX: propagate to the framework implementation.
        super.onLowMemory();
        Log.w(TAG, "Event onLowMemory");
    }
}

@ -0,0 +1,148 @@
package com.xypower.mppreview.ui;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.camera.core.CameraInfo;
import androidx.camera.core.CameraSelector;
import androidx.camera.core.ImageAnalysis;
import androidx.camera.core.ImageCapture;
import androidx.camera.core.ImageCaptureException;
import androidx.camera.core.Preview;
import androidx.camera.lifecycle.ProcessCameraProvider;
import androidx.camera.view.PreviewView;
import androidx.core.content.ContextCompat;
import android.net.Uri;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Toast;
import com.google.common.util.concurrent.ListenableFuture;
import com.xypower.mppreview.MyAnalyzer;
import com.xypower.mppreview.R;
import com.xypower.mppreview.bean.Contants;
import com.xypower.mppreview.databinding.ActivityCameraChannelBinding;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Locale;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * CameraX-based preview/capture screen for one camera "channel" selected on the main screen.
 * Binds preview, capture and a logging analyzer to this activity's lifecycle.
 */
public class CameraChannelActivity extends AppCompatActivity implements View.OnClickListener {
    private com.xypower.mppreview.databinding.ActivityCameraChannelBinding viewBinding;
    // Index into CameraX's available-camera list, from the Contants.CAMERAID intent extra.
    private int camerid;
    // Capture use case; null until startCamera() has bound it.
    private ImageCapture imageCapture;
    // Single worker thread that runs the frame analyzer.
    private ExecutorService cameraExecutor;
    // Directory captured photos are written to.
    // NOTE(review): getOutputDirectory() may return null; takePhoto() would then NPE -- verify.
    private File outputDirectory;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        viewBinding = ActivityCameraChannelBinding.inflate(getLayoutInflater());
        setContentView(viewBinding.getRoot());
        camerid = getIntent().getIntExtra(Contants.CAMERAID, 0);
        initEvent();
        // Set up where photos and related output are saved.
        outputDirectory = getOutputDirectory();
        cameraExecutor = Executors.newSingleThreadExecutor();
        startCamera(camerid);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        cameraExecutor.shutdown();
    }

    private void initEvent() {
        viewBinding.imageCaptureButton.setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        takePhoto();
    }

    private void startCamera(int cameraid) {
        // Bind the camera to this Activity's lifecycle so start/stop is managed automatically.
        ListenableFuture<ProcessCameraProvider> cameraProviderFuture = ProcessCameraProvider.getInstance(this);
        cameraProviderFuture.addListener(() -> {
            try {
                // Obtain the provider used to bind use cases to the lifecycle owner.
                ProcessCameraProvider processCameraProvider = cameraProviderFuture.get();
                // Create a Preview use case and attach the on-screen surface provider.
                PreviewView viewFinder = viewBinding.viewFinder;
                Preview preview = new Preview.Builder().build();
                preview.setSurfaceProvider(viewFinder.getSurfaceProvider());
                // Select the camera by its index in the available-camera list.
                // NOTE(review): an out-of-range index throws IndexOutOfBoundsException,
                // caught by the generic handler below.
                // CameraSelector cameraSelector = CameraSelector.DEFAULT_BACK_CAMERA;
                List<CameraInfo> availableCameraInfos = processCameraProvider.getAvailableCameraInfos();
                CameraInfo cameraInfo = availableCameraInfos.get(cameraid);
                CameraSelector cameraSelector = cameraInfo.getCameraSelector();
                // Create the still-capture use case.
                imageCapture = new ImageCapture.Builder().build();
                // Per-frame analysis (currently just logs timestamps).
                ImageAnalysis imageAnalysis = new ImageAnalysis.Builder().build();
                imageAnalysis.setAnalyzer(cameraExecutor, new MyAnalyzer());
                // Unbind any previous use cases before rebinding.
                processCameraProvider.unbindAll();
                // Bind all use cases to the camera.
                processCameraProvider.bindToLifecycle(CameraChannelActivity.this, cameraSelector, preview,imageCapture,imageAnalysis);
            } catch (Exception e) {
                Log.e(Contants.TAG, "用例绑定失败!" + e);
            }
        }, ContextCompat.getMainExecutor(this));
    }

    private void takePhoto() {
        // imageCapture is null until binding completes; ignore clicks before that.
        if (imageCapture != null) {
            // Timestamped file name keeps each capture unique.
            File photoFile = new File(outputDirectory, new SimpleDateFormat(Contants.FILENAME_FORMAT, Locale.SIMPLIFIED_CHINESE).format(System.currentTimeMillis()) + ".jpg");
            // Output options direct the capture into that file.
            ImageCapture.OutputFileOptions outputFileOptions = new ImageCapture.OutputFileOptions.Builder(photoFile).build();
            // Take the picture; callbacks run on the main executor.
            imageCapture.takePicture(outputFileOptions, ContextCompat.getMainExecutor(this), new ImageCapture.OnImageSavedCallback() {// callback invoked when the photo is saved
                @Override
                public void onImageSaved(@NonNull ImageCapture.OutputFileResults outputFileResults) {
                    Uri savedUri = Uri.fromFile(photoFile);
                    String msg = "照片捕获成功! " + savedUri;
                    Toast.makeText(getBaseContext(), msg, Toast.LENGTH_SHORT).show();
                    Log.d(Contants.TAG, msg);
                }
                @Override
                public void onError(@NonNull ImageCaptureException exception) {
                    Log.e(Contants.TAG, "Photo capture failed: " + exception.getMessage());
                }
            });
        }
    }

    // App-named folder under the first external media dir; null when it cannot be created.
    private File getOutputDirectory() {
        File mediaDir = new File(getExternalMediaDirs()[0], getString(R.string.app_name));
        boolean isExist = mediaDir.exists() || mediaDir.mkdir();
        return isExist ? mediaDir : null;
    }
}

@ -0,0 +1,28 @@
package com.xypower.mppreview.utils;
import android.content.Context;
import androidx.camera.core.CameraInfo;
import androidx.camera.lifecycle.ProcessCameraProvider;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.List;
import java.util.concurrent.ExecutionException;
/**
 * CameraX helper utilities.
 */
public class CameraUtils {

    /**
     * Returns the number of cameras CameraX reports as available.
     *
     * NOTE(review): {@code ProcessCameraProvider.getInstance(..).get()} blocks the calling
     * thread until the provider is initialized; avoid calling this on the main thread
     * during startup if possible.
     *
     * @param context any context; used to obtain the process-wide camera provider
     * @return the number of available cameras
     * @throws RuntimeException if provider initialization fails or the wait is interrupted
     */
    public static int getNumberOfCameras(Context context) {
        ListenableFuture<ProcessCameraProvider> providerFuture = ProcessCameraProvider.getInstance(context);
        try {
            return providerFuture.get().getAvailableCameraInfos().size();
        } catch (InterruptedException e) {
            // FIX: preserve the thread's interrupt status instead of silently swallowing it.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        } catch (ExecutionException e) {
            throw new RuntimeException(e);
        }
    }
}

@ -0,0 +1,142 @@
package com.xypower.mppreview.utils;
import android.app.Activity;
import android.content.Context;
import android.content.res.AssetManager;
import android.util.Log;
import java.io.Closeable;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.Locale;
/**
 * Asset-copy and timestamp helpers used by the HDR test flow.
 */
public class HdrUtil {

    /**
     * Recursively copies an asset directory into {@code destPath}.
     * If {@code directory} actually names a single asset file (its listing is empty),
     * it is copied as a file instead. Errors are logged and swallowed.
     *
     * @param context   context used to access the APK's assets
     * @param directory asset path to copy from
     * @param destPath  filesystem path to copy to (created if missing)
     */
    public static void copyAssetsDir(Context context, String directory, String destPath) {
        try {
            AssetManager assetManager = context.getAssets();
            String[] fileList = assetManager.list(directory);
            if (fileList != null && fileList.length > 0) {
                // Ensure the destination directory exists before copying children.
                File file = new File(destPath);
                if (!file.exists()) {
                    file.mkdirs();
                }
                if (!directory.endsWith(File.separator)) {
                    directory += File.separator;
                }
                if (!destPath.endsWith(File.separator)) {
                    destPath += File.separator;
                }
                for (String fileName : fileList) {
                    copyAssetsDir(context, directory + fileName, destPath + fileName);
                }
            } else {
                // An empty listing means the path names a file, not a directory.
                copyAssetsFile(context, directory, destPath);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Copies a single asset file to {@code destPath}, replacing any existing file.
     * Errors are logged and swallowed.
     */
    public static void copyAssetsFile(Context context, String fileName, String destPath) {
        File file = new File(destPath);
        if (file.exists()) {
            file.delete();
        }
        // FIX: try-with-resources guarantees both streams are closed even on failure;
        // the previous version leaked on exceptions thrown before the finally block ran cleanly.
        try (InputStream inputStream = context.getAssets().open(fileName);
             FileOutputStream fos = new FileOutputStream(file)) {
            byte[] buffer = new byte[8192];
            int len;
            while ((len = inputStream.read(buffer)) != -1) {
                fos.write(buffer, 0, len);
            }
            fos.flush();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Closes a resource, ignoring any error (best-effort cleanup). Null is a no-op.
     */
    public static void closeFriendly(Closeable closeable) {
        if (closeable != null) {
            try {
                closeable.close();
            } catch (Exception ignored) {
                // Intentionally ignored: cleanup is best-effort.
            }
        }
    }

    /**
     * @return a file-name-safe timestamp such as {@code 20250323_104953_582}
     */
    public static String generateTimestamp() {
        SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd_HHmmss_SSS", Locale.US);
        return sdf.format(new Date());
    }
}

@ -0,0 +1,304 @@
package com.xypower.mppreview.utils;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.os.Environment;
import android.provider.MediaStore;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import android.graphics.Bitmap;
import androidx.activity.result.ActivityResultLauncher;
import androidx.core.content.FileProvider;
import java.io.File;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
public class PhotoUtil {
/**
* photoUri
*/
private static Uri photoUri;
/**
*
*/
/**
 * Launches the system camera app, directing its output into a file this app controls.
 * Does nothing when no camera app can handle the intent or the target file cannot be created.
 *
 * @param context             caller context, used for package-manager and FileProvider lookups
 * @param photoResultLauncher launcher that receives the capture result
 */
public static void openCamera(Context context, ActivityResultLauncher<Intent> photoResultLauncher) {
    Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    // Make sure a camera app exists to handle the intent.
    if (takePictureIntent.resolveActivity(context.getPackageManager()) != null) {
        File photoFile = saveFileName(context);
        if (photoFile != null) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
                // Android 7.0+ forbids file:// URIs across apps; use a content:// URI via FileProvider.
                // NOTE(review): the authority "com.stg.rouge.activity.fileprovider" does not match this
                // app's package -- verify it matches the <provider> entry in the manifest.
                photoUri = FileProvider.getUriForFile(context.getApplicationContext(), "com.stg.rouge.activity.fileprovider", photoFile);
            } else {
                photoUri = getDestinationUri(context);
            }
            takePictureIntent.addFlags(Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
            takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoUri);
            photoResultLauncher.launch(takePictureIntent);
        }
    }
}
/**
* Uri
*
* @return
*/
/**
 * Builds a file:// Uri for a uniquely named JPEG in the app's external pictures directory
 * (pre-N fallback; N+ uses FileProvider instead).
 *
 * @param context used to resolve the app's external pictures directory
 * @return a file Uri for the destination image
 */
private static Uri getDestinationUri(Context context) {
    // Current time in millis keeps the name unique enough for this flow.
    final String fileName = String.format("winetalk_%s.jpg", System.currentTimeMillis());
    final File target = new File(context.getExternalFilesDir(Environment.DIRECTORY_PICTURES), fileName);
    return Uri.fromFile(target);
}
//当前路径,拍照回调后需要使用
private String currentPath = null;
/**
*
*
* @return
*/
/**
 * Creates an (empty) timestamp-named .jpg file under /sdcard/DCIM for the camera app to fill.
 *
 * @param context currently unused -- kept for signature stability
 * @return the target File; may point to a file that was never actually created when
 *         creation failed (the exception is swallowed)
 */
private static File saveFileName(Context context) {
    // File newFolder = context.getExternalFilesDir(Environment.DIRECTORY_PICTURES);
    // NOTE(review): hard-coded shared-storage path; requires legacy external-storage
    // access on API 29+ -- confirm the app requests it.
    String outputPath = "/sdcard/DCIM";
    // NOTE(review): SimpleDateFormat without an explicit Locale uses the device default.
    SimpleDateFormat format = new SimpleDateFormat("yyyyMMddHHmmss");
    Date date = new Date(System.currentTimeMillis());
    String name = format.format(date) + ".jpg";
    File ji = null;
    try {
        ji = new File(outputPath + "/" + name);
        // Pre-create the file; the boolean result is ignored (an existing file is reused).
        ji.createNewFile();
        // currentPath = ji.getAbsolutePath();
    } catch (Exception e) {
        // Swallowed on purpose: the (possibly non-created) File is still returned.
        e.printStackTrace();
    }
    return ji;
}
private final static int BMP_WIDTH_OF_TIMES = 4;
private final static int BYTE_PER_PIXEL = 3;
/**
* Android Bitmap Object to Window's v3 24bit Bmp Format File
* @param orgBitmap
* @param filePath
* @return file saved result
*/
public static boolean save(Bitmap orgBitmap, String filePath){
if(orgBitmap == null){
return false;
}
if(filePath == null){
return false;
}
boolean isSaveSuccess = true;
//image size
int width = orgBitmap.getWidth();
int height = orgBitmap.getHeight();
//image dummy data size
//reason : bmp file's width equals 4's multiple
int dummySize = 0;
byte[] dummyBytesPerRow = null;
boolean hasDummy = false;
if(isBmpWidth4Times(width)){
hasDummy = true;
dummySize = BMP_WIDTH_OF_TIMES - (width % BMP_WIDTH_OF_TIMES);
dummyBytesPerRow = new byte[dummySize * BYTE_PER_PIXEL];
for(int i = 0; i < dummyBytesPerRow.length; i++){
dummyBytesPerRow[i] = (byte)0xFF;
}
}
int[] pixels = new int[width * height];
int imageSize = pixels.length * BYTE_PER_PIXEL + (height * dummySize * BYTE_PER_PIXEL);
int imageDataOffset = 0x36;
int fileSize = imageSize + imageDataOffset;
//Android Bitmap Image Data
orgBitmap.getPixels(pixels, 0, width, 0, 0, width, height);
//ByteArrayOutputStream baos = new ByteArrayOutputStream(fileSize);
ByteBuffer buffer = ByteBuffer.allocate(fileSize);
try {
/**
* BITMAP FILE HEADER Write Start
**/
buffer.put((byte)0x42);
buffer.put((byte)0x4D);
//size
buffer.put(writeInt(fileSize));
//reserved
buffer.put(writeShort((short)0));
buffer.put(writeShort((short)0));
//image data start offset
buffer.put(writeInt(imageDataOffset));
/** BITMAP FILE HEADER Write End */
//*******************************************
/** BITMAP INFO HEADER Write Start */
//size
buffer.put(writeInt(0x28));
//width, height
buffer.put(writeInt(width));
buffer.put(writeInt(height));
//planes
buffer.put(writeShort((short)1));
//bit count
buffer.put(writeShort((short)24));
//bit compression
buffer.put(writeInt(0));
//image data size
buffer.put(writeInt(imageSize));
//horizontal resolution in pixels per meter
buffer.put(writeInt(0));
//vertical resolution in pixels per meter (unreliable)
buffer.put(writeInt(0));
//컬러 사용 유무
buffer.put(writeInt(0));
//중요하게 사용하는 색
buffer.put(writeInt(0));
/** BITMAP INFO HEADER Write End */
int row = height;
int col = width;
int startPosition = 0;
int endPosition = 0;
while( row > 0 ){
startPosition = (row - 1) * col;
endPosition = row * col;
for(int i = startPosition; i < endPosition; i++ ){
buffer.put(write24BitForPixcel(pixels[i]));
if(hasDummy){
if(isBitmapWidthLastPixcel(width, i)){
buffer.put(dummyBytesPerRow);
}
}
}
row--;
}
FileOutputStream fos = new FileOutputStream(filePath);
fos.write(buffer.array());
fos.close();
} catch (IOException e1) {
e1.printStackTrace();
isSaveSuccess = false;
}
finally{
}
return isSaveSuccess;
}
/**
* Is last pixel in Android Bitmap width
* @param width
* @param i
* @return
*/
private static boolean isBitmapWidthLastPixcel(int width, int i) {
return i > 0 && (i % (width - 1)) == 0;
}
/**
* BMP file is a multiples of 4?
* @param width
* @return
*/
private static boolean isBmpWidth4Times(int width) {
return width % BMP_WIDTH_OF_TIMES > 0;
}
/**
* Write integer to little-endian
* @param value
* @return
* @throws IOException
*/
private static byte[] writeInt(int value) throws IOException {
byte[] b = new byte[4];
b[0] = (byte)(value & 0x000000FF);
b[1] = (byte)((value & 0x0000FF00) >> 8);
b[2] = (byte)((value & 0x00FF0000) >> 16);
b[3] = (byte)((value & 0xFF000000) >> 24);
return b;
}
/**
* Write integer pixel to little-endian byte array
* @param value
* @return
* @throws IOException
*/
private static byte[] write24BitForPixcel(int value) throws IOException {
byte[] b = new byte[3];
b[0] = (byte)(value & 0x000000FF);
b[1] = (byte)((value & 0x0000FF00) >> 8);
b[2] = (byte)((value & 0x00FF0000) >> 16);
return b;
}
/**
* Write short to little-endian byte array
* @param value
* @return
* @throws IOException
*/
private static byte[] writeShort(short value) throws IOException {
byte[] b = new byte[2];
b[0] = (byte)(value & 0x00FF);
b[1] = (byte)((value & 0xFF00) >> 8);
return b;
}
}

@ -0,0 +1,36 @@
package com.xypower.mppreview.widget;
import android.app.Activity;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
public class ErrorDialog extends DialogFragment {

    // Message displayed by the dialog; defaulted for the framework-required no-arg constructor.
    private String mErrorMessage;

    public ErrorDialog() {
        mErrorMessage = "Unknown error occurred!";
    }

    /**
     * Factory for a dialog carrying a custom message.
     * Fragments must be instantiable through a default constructor, so the
     * message is injected after construction rather than via a constructor arg.
     */
    public static ErrorDialog buildErrorDialog(String errorMessage) {
        ErrorDialog dialog = new ErrorDialog();
        dialog.mErrorMessage = errorMessage;
        return dialog;
    }

    /** Shows the error message with a single OK button that finishes the host activity. */
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final Activity activity = getActivity();
        AlertDialog.Builder builder = new AlertDialog.Builder(activity);
        builder.setMessage(mErrorMessage);
        builder.setPositiveButton(android.R.string.ok, (dialogInterface, i) -> activity.finish());
        return builder.create();
    }
}

@ -0,0 +1,35 @@
package com.xypower.mppreview.widget;
import android.app.Dialog;
import android.content.DialogInterface;
import android.os.Bundle;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.DialogFragment;
import androidx.fragment.app.Fragment;
import com.xypower.mppreview.R;
public class PermissionConfirmationDialog extends DialogFragment {

    /** Factory method; fragments must be created through a default constructor. */
    public static PermissionConfirmationDialog newInstance() {
        return new PermissionConfirmationDialog();
    }

    /**
     * Builds an OK/Cancel dialog showing the permission rationale
     * ({@code R.string.request_permission}). Cancel finishes the host activity.
     */
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        // Removed the unused `getParentFragment()` local left over from the
        // upstream camera2 sample.
        return new AlertDialog.Builder(getActivity())
                .setMessage(R.string.request_permission)
                .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        // Intentionally empty: tapping OK just dismisses the rationale.
                        // NOTE(review): the upstream sample re-requested the permission
                        // here via the parent fragment — confirm whether that is needed.
                    }
                })
                .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        getActivity().finish();
                    }
                })
                .create();
    }
}

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<rotate xmlns:android="http://schemas.android.com/apk/res/android"
android:duration="1500"
android:fromDegrees="0"
android:pivotX="50%"
android:pivotY="50%"
android:repeatCount="infinite"
android:toDegrees="360">
    <!-- fromDegrees: start angle; toDegrees: end angle; duration: animation length (ms) -->
    <!-- pivotX/pivotY: a percentage rotates about the view itself; a plain number is a pixel coordinate -->
</rotate>

@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

Before

Width:  |  Height:  |  Size: 1.0 KiB

After

Width:  |  Height:  |  Size: 1.0 KiB

@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="oval">
<solid android:color="@color/shutter_icon"/>
<stroke
android:width="5dp"
android:color="#FFFFFF"
/>
</shape>

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="oval">
<solid android:color="@color/shutter_icon_pressed"/>
</shape>

Before

Width:  |  Height:  |  Size: 196 B

After

Width:  |  Height:  |  Size: 196 B

@ -0,0 +1,17 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true"
tools:context=".ui.CameraActivity">
<!-- res/layout/example_activity.xml -->
<androidx.fragment.app.FragmentContainerView
android:id="@+id/container"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -0,0 +1,46 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:keepScreenOn="true"
tools:context=".ui.CameraChannelActivity">
<androidx.camera.view.PreviewView
android:id="@+id/viewFinder"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<!-- <Button-->
<!-- android:id="@+id/image_capture_button"-->
<!-- android:layout_width="110dp"-->
<!-- android:layout_height="110dp"-->
<!-- android:layout_marginEnd="50dp"-->
<!-- android:layout_marginBottom="50dp"-->
<!-- android:elevation="2dp"-->
<!-- android:text="拍照"-->
<!-- app:layout_constraintBottom_toBottomOf="parent"-->
<!-- app:layout_constraintEnd_toStartOf="@id/vertical_centerline"-->
<!-- app:layout_constraintLeft_toLeftOf="parent" />-->
<Button
android:id="@+id/image_capture_button"
android:layout_width="50dp"
android:layout_height="50dp"
android:layout_marginBottom="40dp"
android:background="@mipmap/takepic"
android:shadowColor="@null"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent" />
<androidx.constraintlayout.widget.Guideline
android:id="@+id/vertical_centerline"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="vertical"
app:layout_constraintGuide_percent=".50" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -0,0 +1,118 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingLeft="15dp"
android:paddingTop="20dp"
android:paddingRight="15dp"
android:keepScreenOn="true"
tools:context=".MainActivity">
<TextView
android:id="@+id/hdrhint"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="曝光补偿:"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toTopOf="parent" />
<Spinner
android:id="@+id/spinner"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:entries="@array/size"
app:layout_constraintBottom_toBottomOf="@+id/hdrhint"
app:layout_constraintLeft_toRightOf="@+id/hdrhint"
app:layout_constraintTop_toTopOf="@+id/hdrhint" />
<Button
android:id="@+id/hdrtakepic"
android:layout_width="100dp"
android:layout_height="wrap_content"
android:layout_marginTop="20dp"
android:text="HDR拍照"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toBottomOf="@+id/hdrhint" />
<Button
android:id="@+id/systakepic"
android:layout_width="100dp"
android:layout_height="wrap_content"
android:layout_marginLeft="10dp"
android:text="拍照"
app:layout_constraintLeft_toRightOf="@id/hdrtakepic"
app:layout_constraintTop_toTopOf="@+id/hdrtakepic" />
<Button
android:id="@+id/btnTest"
android:layout_width="100dp"
android:layout_height="wrap_content"
android:layout_marginLeft="10dp"
android:text="Test"
app:layout_constraintLeft_toRightOf="@id/systakepic"
app:layout_constraintTop_toTopOf="@+id/hdrtakepic" />
<!-- <Button-->
<!-- android:id="@+id/channel1"-->
<!-- android:layout_width="100dp"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:text="通道一"-->
<!-- app:layout_constraintLeft_toLeftOf="parent"-->
<!-- app:layout_constraintTop_toBottomOf="@+id/hdrtakepic" />-->
<!-- <Button-->
<!-- android:id="@+id/channel2"-->
<!-- android:layout_width="100dp"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:layout_marginLeft="15dp"-->
<!-- android:text="通道二"-->
<!-- app:layout_constraintLeft_toRightOf="@+id/channel1"-->
<!-- app:layout_constraintTop_toTopOf="@+id/channel1" />-->
<!-- <Button-->
<!-- android:id="@+id/channel3"-->
<!-- android:layout_width="100dp"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:layout_marginLeft="15dp"-->
<!-- android:text="通道三"-->
<!-- app:layout_constraintLeft_toRightOf="@+id/channel2"-->
<!-- app:layout_constraintTop_toTopOf="@+id/channel1" />-->
<!-- <Button-->
<!-- android:id="@+id/channel4"-->
<!-- android:layout_width="100dp"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:text="通道四"-->
<!-- app:layout_constraintLeft_toLeftOf="parent"-->
<!-- app:layout_constraintTop_toBottomOf="@+id/channel1" />-->
<!-- <Button-->
<!-- android:id="@+id/channel5"-->
<!-- android:layout_width="100dp"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:layout_marginLeft="15dp"-->
<!-- android:text="通道五"-->
<!-- app:layout_constraintLeft_toRightOf="@+id/channel4"-->
<!-- app:layout_constraintTop_toTopOf="@+id/channel4" />-->
<!-- <Button-->
<!-- android:id="@+id/channel6"-->
<!-- android:layout_width="100dp"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:layout_marginLeft="15dp"-->
<!-- android:text="通道六"-->
<!-- app:layout_constraintLeft_toRightOf="@+id/channel5"-->
<!-- app:layout_constraintTop_toTopOf="@+id/channel4" />-->
<androidx.recyclerview.widget.RecyclerView
app:layout_constraintTop_toBottomOf="@id/systakepic"
app:layout_constraintBottom_toBottomOf="parent"
android:id="@+id/recyclerView"
android:layout_width="match_parent"
android:layout_height="0dp"/>
</androidx.constraintlayout.widget.ConstraintLayout>

@ -0,0 +1,38 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".Camera2RawFragment">
<com.xypower.mppreview.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="match_parent"
android:layout_height="match_parent" />
<Button
android:id="@+id/takepic"
android:layout_width="50dp"
android:layout_height="50dp"
android:layout_marginBottom="40dp"
android:background="@mipmap/takepic"
android:shadowColor="@null"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent" />
<ImageView
android:visibility="invisible"
android:id="@+id/rorpic"
android:src="@mipmap/ror"
app:layout_constraintTop_toTopOf="@+id/takepic"
app:layout_constraintBottom_toBottomOf="@+id/takepic"
app:layout_constraintLeft_toLeftOf="@+id/takepic"
app:layout_constraintRight_toRightOf="@+id/takepic"
android:layout_width="45dp"
android:layout_height="45dp">
</ImageView>
</androidx.constraintlayout.widget.ConstraintLayout>

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="wrap_content"
android:layout_height="wrap_content">
<Button
android:id="@+id/channel"
android:layout_width="100dp"
android:layout_height="wrap_content"
tools:ignore="MissingConstraints" />
</androidx.constraintlayout.widget.ConstraintLayout>

@ -0,0 +1,80 @@
<?xml version="1.0" encoding="utf-8"?>
<menu xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
tools:context="com.xypower.mpmaster.MainActivity">
<item
android:id="@+id/action_picture"
android:orderInCategory="100"
android:title="Capture"
app:showAsAction="always" />
<item
android:id="@+id/action_focus"
android:orderInCategory="100"
android:title="Focus"
app:showAsAction="always" />
<item
android:id="@+id/action_comp0"
android:orderInCategory="100"
android:title="Reset"
app:showAsAction="never" />
<item
android:id="@+id/action_comp1"
android:orderInCategory="100"
android:title="X1"
app:showAsAction="never" />
<item
android:id="@+id/action_comp2"
android:orderInCategory="100"
android:title="X2"
app:showAsAction="never" />
<item
android:id="@+id/action_comp3"
android:orderInCategory="100"
android:title="X3"
app:showAsAction="never" />
<item
android:id="@+id/action_comp4"
android:orderInCategory="100"
android:title="X4"
app:showAsAction="never" />
<item
android:id="@+id/action_comp_1"
android:orderInCategory="100"
android:title="-X1"
app:showAsAction="never" />
<item
android:id="@+id/action_comp_2"
android:orderInCategory="100"
android:title="-X2"
app:showAsAction="never" />
<item
android:id="@+id/action_comp_3"
android:orderInCategory="100"
android:title="-X3"
app:showAsAction="never" />
<item
android:id="@+id/action_comp_4"
android:orderInCategory="100"
android:title="-X4"
app:showAsAction="never" />
<item
android:id="@+id/action_info"
android:orderInCategory="100"
android:title="Info"
app:showAsAction="never" />
</menu>

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 982 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save