Remove the horizontal flip during sampling and remove the OpenCV library; fixes: https://github.com/devzwy/open_nsfw_android/issues/4#issuecomment-494745262

pull/34/head 1.2.6
jason 6 years ago
parent 6a012d8fc0
commit 7f54e3cfc1
  1. 1
      .idea/gradle.xml
  2. 2
      .idea/misc.xml
  3. 16
      README.md
  4. 8
      app/build.gradle
  5. 8
      app/src/main/java/com/example/open_nsfw_android/MainActivity.kt
  6. 9
      app/src/main/java/com/example/open_nsfw_android/MainAdapter.kt
  7. 1
      app/src/main/res/layout/activity_main.xml
  8. 8
      app/src/main/res/values/colors.xml
  9. 44
      nsfw/CMakeLists.txt
  10. 39
      nsfw/build.gradle
  11. BIN
      nsfw/libs/arm64-v8a/libopencv_java3.so
  12. BIN
      nsfw/libs/armeabi-v7a/libopencv_java3.so
  13. BIN
      nsfw/libs/armeabi/libopencv_java3.so
  14. BIN
      nsfw/libs/mips/libopencv_java3.so
  15. BIN
      nsfw/libs/mips64/libopencv_java3.so
  16. BIN
      nsfw/libs/x86/libopencv_java3.so
  17. BIN
      nsfw/libs/x86_64/libopencv_java3.so
  18. 24
      nsfw/src/main/cpp/native-lib.cpp
  19. 126
      nsfw/src/main/java/com/zwy/nsfw/Classifier.java
  20. 10
      nsfw/src/main/java/com/zwy/nsfw/JniLoader.java
  21. 17
      openCVLibrary340/build.gradle
  22. 199
      openCVLibrary340/build/generated/aidl_source_output_dir/debug/compileDebugAidl/out/org/opencv/engine/OpenCVEngineInterface.java
  23. 13
      openCVLibrary340/build/generated/source/buildConfig/debug/org/opencv/BuildConfig.java
  24. 9
      openCVLibrary340/build/intermediates/aapt_friendly_merged_manifests/debug/aapt/AndroidManifest.xml
  25. 1
      openCVLibrary340/build/intermediates/aapt_friendly_merged_manifests/debug/aapt/output.json
  26. 9
      openCVLibrary340/build/intermediates/library_manifest/debug/AndroidManifest.xml
  27. 1
      openCVLibrary340/build/intermediates/merged_manifests/debug/output.json
  28. 25
      openCVLibrary340/build/outputs/logs/manifest-merger-debug-report.txt
  29. 9
      openCVLibrary340/lint.xml
  30. 5
      openCVLibrary340/src/main/AndroidManifest.xml
  31. 33
      openCVLibrary340/src/main/aidl/org/opencv/engine/OpenCVEngineInterface.aidl
  32. 391
      openCVLibrary340/src/main/java/org/opencv/android/AsyncServiceHelper.java
  33. 141
      openCVLibrary340/src/main/java/org/opencv/android/BaseLoaderCallback.java
  34. 302
      openCVLibrary340/src/main/java/org/opencv/android/Camera2Renderer.java
  35. 495
      openCVLibrary340/src/main/java/org/opencv/android/CameraBridgeViewBase.java
  36. 440
      openCVLibrary340/src/main/java/org/opencv/android/CameraGLRendererBase.java
  37. 119
      openCVLibrary340/src/main/java/org/opencv/android/CameraGLSurfaceView.java
  38. 166
      openCVLibrary340/src/main/java/org/opencv/android/CameraRenderer.java
  39. 66
      openCVLibrary340/src/main/java/org/opencv/android/FpsMeter.java
  40. 34
      openCVLibrary340/src/main/java/org/opencv/android/InstallCallbackInterface.java
  41. 379
      openCVLibrary340/src/main/java/org/opencv/android/JavaCameraView.java
  42. 40
      openCVLibrary340/src/main/java/org/opencv/android/LoaderCallbackInterface.java
  43. 132
      openCVLibrary340/src/main/java/org/opencv/android/OpenCVLoader.java
  44. 104
      openCVLibrary340/src/main/java/org/opencv/android/StaticHelper.java
  45. 139
      openCVLibrary340/src/main/java/org/opencv/android/Utils.java
  46. 1574
      openCVLibrary340/src/main/java/org/opencv/calib3d/Calib3d.java
  47. 331
      openCVLibrary340/src/main/java/org/opencv/calib3d/StereoBM.java
  48. 254
      openCVLibrary340/src/main/java/org/opencv/calib3d/StereoMatcher.java
  49. 231
      openCVLibrary340/src/main/java/org/opencv/calib3d/StereoSGBM.java
  50. 111
      openCVLibrary340/src/main/java/org/opencv/core/Algorithm.java
  51. 2745
      openCVLibrary340/src/main/java/org/opencv/core/Core.java
  52. 15
      openCVLibrary340/src/main/java/org/opencv/core/CvException.java
  53. 136
      openCVLibrary340/src/main/java/org/opencv/core/CvType.java
  54. 58
      openCVLibrary340/src/main/java/org/opencv/core/DMatch.java
  55. 83
      openCVLibrary340/src/main/java/org/opencv/core/KeyPoint.java
  56. 1334
      openCVLibrary340/src/main/java/org/opencv/core/Mat.java
  57. 79
      openCVLibrary340/src/main/java/org/opencv/core/MatOfByte.java
  58. 83
      openCVLibrary340/src/main/java/org/opencv/core/MatOfDMatch.java
  59. 79
      openCVLibrary340/src/main/java/org/opencv/core/MatOfDouble.java
  60. 79
      openCVLibrary340/src/main/java/org/opencv/core/MatOfFloat.java
  61. 79
      openCVLibrary340/src/main/java/org/opencv/core/MatOfFloat4.java
  62. 79
      openCVLibrary340/src/main/java/org/opencv/core/MatOfFloat6.java
  63. 80
      openCVLibrary340/src/main/java/org/opencv/core/MatOfInt.java
  64. 80
      openCVLibrary340/src/main/java/org/opencv/core/MatOfInt4.java
  65. 86
      openCVLibrary340/src/main/java/org/opencv/core/MatOfKeyPoint.java
  66. 78
      openCVLibrary340/src/main/java/org/opencv/core/MatOfPoint.java
  67. 78
      openCVLibrary340/src/main/java/org/opencv/core/MatOfPoint2f.java
  68. 79
      openCVLibrary340/src/main/java/org/opencv/core/MatOfPoint3.java
  69. 79
      openCVLibrary340/src/main/java/org/opencv/core/MatOfPoint3f.java
  70. 81
      openCVLibrary340/src/main/java/org/opencv/core/MatOfRect.java
  71. 81
      openCVLibrary340/src/main/java/org/opencv/core/MatOfRect2d.java
  72. 68
      openCVLibrary340/src/main/java/org/opencv/core/Point.java
  73. 79
      openCVLibrary340/src/main/java/org/opencv/core/Point3.java
  74. 82
      openCVLibrary340/src/main/java/org/opencv/core/Range.java
  75. 104
      openCVLibrary340/src/main/java/org/opencv/core/Rect.java
  76. 104
      openCVLibrary340/src/main/java/org/opencv/core/Rect2d.java
  77. 113
      openCVLibrary340/src/main/java/org/opencv/core/RotatedRect.java
  78. 90
      openCVLibrary340/src/main/java/org/opencv/core/Scalar.java
  79. 73
      openCVLibrary340/src/main/java/org/opencv/core/Size.java
  80. 92
      openCVLibrary340/src/main/java/org/opencv/core/TermCriteria.java
  81. 182
      openCVLibrary340/src/main/java/org/opencv/core/TickMeter.java
  82. 212
      openCVLibrary340/src/main/java/org/opencv/dnn/DictValue.java
  83. 279
      openCVLibrary340/src/main/java/org/opencv/dnn/Dnn.java
  84. 176
      openCVLibrary340/src/main/java/org/opencv/dnn/Layer.java
  85. 601
      openCVLibrary340/src/main/java/org/opencv/dnn/Net.java
  86. 316
      openCVLibrary340/src/main/java/org/opencv/features2d/AKAZE.java
  87. 182
      openCVLibrary340/src/main/java/org/opencv/features2d/AgastFeatureDetector.java
  88. 81
      openCVLibrary340/src/main/java/org/opencv/features2d/BFMatcher.java
  89. 125
      openCVLibrary340/src/main/java/org/opencv/features2d/BOWImgDescriptorExtractor.java
  90. 89
      openCVLibrary340/src/main/java/org/opencv/features2d/BOWKMeansTrainer.java
  91. 134
      openCVLibrary340/src/main/java/org/opencv/features2d/BOWTrainer.java
  92. 137
      openCVLibrary340/src/main/java/org/opencv/features2d/BRISK.java
  93. 197
      openCVLibrary340/src/main/java/org/opencv/features2d/DescriptorExtractor.java
  94. 426
      openCVLibrary340/src/main/java/org/opencv/features2d/DescriptorMatcher.java
  95. 182
      openCVLibrary340/src/main/java/org/opencv/features2d/FastFeatureDetector.java
  96. 292
      openCVLibrary340/src/main/java/org/opencv/features2d/Feature2D.java
  97. 219
      openCVLibrary340/src/main/java/org/opencv/features2d/FeatureDetector.java
  98. 158
      openCVLibrary340/src/main/java/org/opencv/features2d/Features2d.java
  99. 61
      openCVLibrary340/src/main/java/org/opencv/features2d/FlannBasedMatcher.java
  100. 302
      openCVLibrary340/src/main/java/org/opencv/features2d/GFTTDetector.java
  101. Some files were not shown because too many files have changed in this diff
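
Before the per-file diffs: this commit drops the horizontal flip and the OpenCV-based resize in favour of plain Android graphics APIs plus a direct ByteBuffer fill. Below is a minimal, self-contained sketch of that preprocessing idea; it is not the committed code (the commit resizes to 256x256 with a Canvas/Matrix helper and keeps a center-crop offset), and the `INPUT` constant and class name here are hypothetical.

```
// Sketch of OpenCV-free preprocessing, assuming the open_nsfw convention seen in
// the diff below: BGR channel order with per-channel means 104 / 117 / 123.
import android.graphics.Bitmap;
import android.graphics.Color;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

final class PreprocessSketch {
    private static final int INPUT = 256; // hypothetical; the real code resizes to 256 and then crops

    static ByteBuffer toModelInput(Bitmap src) {
        // Filtered scale instead of Imgproc.resize(); no horizontal flip anymore.
        Bitmap scaled = Bitmap.createScaledBitmap(src, INPUT, INPUT, true);

        int[] pixels = new int[INPUT * INPUT];
        scaled.getPixels(pixels, 0, INPUT, 0, 0, INPUT, INPUT);

        ByteBuffer buf = ByteBuffer.allocateDirect(INPUT * INPUT * 3 * 4); // 3 channels, 4-byte floats
        buf.order(ByteOrder.LITTLE_ENDIAN);
        for (int color : pixels) {
            buf.putFloat(Color.blue(color) - 104f);  // B
            buf.putFloat(Color.green(color) - 117f); // G
            buf.putFloat(Color.red(color) - 123f);   // R
        }
        buf.rewind();
        return buf;
    }
}
```
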

@ -11,7 +11,6 @@
<option value="$PROJECT_DIR$" /> <option value="$PROJECT_DIR$" />
<option value="$PROJECT_DIR$/app" /> <option value="$PROJECT_DIR$/app" />
<option value="$PROJECT_DIR$/nsfw" /> <option value="$PROJECT_DIR$/nsfw" />
<option value="$PROJECT_DIR$/openCVLibrary340" />
</set> </set>
</option> </option>
<option name="resolveModulePerSourceSet" value="false" /> <option name="resolveModulePerSourceSet" value="false" />

@ -33,7 +33,7 @@
</profile-state> </profile-state>
</entry> </entry>
</component> </component>
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_7" project-jdk-name="1.8" project-jdk-type="JavaSDK"> <component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" project-jdk-name="1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" /> <output url="file://$PROJECT_DIR$/build/classes" />
</component> </component>
<component name="ProjectType"> <component name="ProjectType">

@ -47,22 +47,22 @@ __Please add__
nsfwBean?.nsfw ... nsfwBean?.nsfw ...
``` ```
- When packaging the project, use the code below to pin the NDK ABI(s) you actually need; otherwise libraries for every ABI are bundled and the project becomes abnormally large ### [Click here to download the apk](https://fir.im/nsfw)
```
ndk {
abiFilters 'armeabi-v7a'
}
```
### [Click here to download the apk (arm+x86)](https://fir.im/nsfw)
### Scan the QR code to download ### Scan the QR code to download
![image](https://github.com/devzwy/open_nsfw_android/blob/master/img/2.png) ![image](https://github.com/devzwy/open_nsfw_android/blob/master/img/2.png)
### Demo results:
## Warning: do not open the images below in public!!!
## Warning: do not open the images below in public!!! ## Warning: do not open the images below in public!!!
## Warning: do not open the images below in public!!!
## Warning: do not open the images below in public!!! ## Warning: do not open the images below in public!!!
## Warning: do not open the images below in public!!! ## Warning: do not open the images below in public!!!
### Demo results (manually censored afterwards to avoid being blocked):
![image](https://github.com/devzwy/open_nsfw_android/blob/master/img/1.png) ![image](https://github.com/devzwy/open_nsfw_android/blob/master/img/1.png)

@ -10,12 +10,9 @@ android {
applicationId "com.example.open_nsfw_android" applicationId "com.example.open_nsfw_android"
minSdkVersion 15 minSdkVersion 15
targetSdkVersion 28 targetSdkVersion 28
versionCode 2 versionCode 3
versionName "1.1" versionName "1.2.6"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
ndk {
abiFilters 'armeabi-v7a','x86_64'
}
} }
buildTypes { buildTypes {
debug{ debug{
@ -23,7 +20,6 @@ android {
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
} }
} }
aaptOptions { aaptOptions {
noCompress "tflite" noCompress "tflite"
} }
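
The `noCompress "tflite"` entry kept in this hunk matters because the classifier's imports (AssetFileDescriptor, FileChannel, MappedByteBuffer) suggest the model is memory-mapped straight out of assets, which only works when the asset is stored uncompressed. A minimal sketch of that loading pattern, with a hypothetical asset name (the real file name is not shown in this diff):

```
import android.app.Activity;
import android.content.res.AssetFileDescriptor;

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;

final class ModelLoaderSketch {
    // Memory-mapping relies on aaptOptions { noCompress "tflite" } keeping the
    // asset stored, not deflated, inside the APK.
    static MappedByteBuffer loadModel(Activity activity, String assetName) throws IOException {
        AssetFileDescriptor fd = activity.getAssets().openFd(assetName); // e.g. "nsfw.tflite" (hypothetical)
        try (FileInputStream input = new FileInputStream(fd.getFileDescriptor());
             FileChannel channel = input.getChannel()) {
            return channel.map(FileChannel.MapMode.READ_ONLY,
                    fd.getStartOffset(), fd.getDeclaredLength());
        }
    }
}
```
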

@ -1,10 +1,13 @@
package com.example.open_nsfw_android package com.example.open_nsfw_android
import android.Manifest
import android.annotation.SuppressLint import android.annotation.SuppressLint
import android.content.Intent import android.content.Intent
import android.content.pm.PackageManager import android.content.pm.PackageManager
import android.graphics.BitmapFactory import android.graphics.BitmapFactory
import android.os.Bundle import android.os.Bundle
import android.support.v4.app.ActivityCompat
import android.support.v4.content.ContextCompat
import android.support.v7.app.AppCompatActivity import android.support.v7.app.AppCompatActivity
import android.support.v7.widget.LinearLayoutManager import android.support.v7.widget.LinearLayoutManager
import android.view.View import android.view.View
@ -32,6 +35,11 @@ class MainActivity : AppCompatActivity(), View.OnClickListener {
initAdapter() initAdapter()
initClickListener() initClickListener()
tv_version.text = "当前版本:${this.packageManager.getPackageInfo(packageName, 0).versionName}" tv_version.text = "当前版本:${this.packageManager.getPackageInfo(packageName, 0).versionName}"
if (ContextCompat.checkSelfPermission(this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED){ //permission not yet granted
//request the permission
ActivityCompat.requestPermissions(this, arrayOf(Manifest.permission.WRITE_EXTERNAL_STORAGE),1);
}
} }
override fun onClick(v: View) { override fun onClick(v: View) {

@ -16,20 +16,17 @@ class MainAdapter(nsfwList: List<MyNsfwBean>?) :
val textView = helper.getView<TextView>(R.id.tv_text) val textView = helper.getView<TextView>(R.id.tv_text)
val imageView = helper.getView<ImageView>(R.id.iv) val imageView = helper.getView<ImageView>(R.id.iv)
val view = helper.getView<RelativeLayout>(R.id.view) val view = helper.getView<RelativeLayout>(R.id.view)
var nsfwStr = "色情图片"
var color = ContextCompat.getColor(mContext, R.color.nsfw1) var color = ContextCompat.getColor(mContext, R.color.nsfw1)
when (item.nsfw) { when (item.nsfw) {
in 0.0..0.3 -> { in 0.0..0.2 -> {
nsfwStr = "正常图片"
color = ContextCompat.getColor(mContext, R.color.nsfw3) color = ContextCompat.getColor(mContext, R.color.nsfw3)
} }
in 0.3..0.6 -> { in 0.2..0.8 -> {
nsfwStr = "👙比基尼"
color = ContextCompat.getColor(mContext, R.color.nsfw2) color = ContextCompat.getColor(mContext, R.color.nsfw2)
} }
} }
textView.text = textView.text =
"path = ${"img/${item.path}"} \n\nSFW score: ${item.sfw}\nNSFW score: ${item.nsfw} \n\n 鉴定结果: ${nsfwStr}" "path = ${"img/${item.path}"} \n\nSFW score: ${item.sfw}\n\nNSFW score: ${item.nsfw}"
imageView.setImageBitmap(item.bitmap) imageView.setImageBitmap(item.bitmap)
view.setBackgroundColor(color) view.setBackgroundColor(color)
} }
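
The adapter hunk above changes the score buckets from 0.0–0.3 / 0.3–0.6 to 0.0–0.2 / 0.2–0.8 and drops the per-bucket label text. A small Java sketch of the new cut-offs (the enum and method names are hypothetical, not part of the commit):

```
final class NsfwBucketSketch {
    enum Verdict { SAFE, BORDERLINE, NSFW }

    // Cut-offs taken from the new when() ranges: <= 0.2 gets the "safe" colour,
    // <= 0.8 the middle colour, anything above keeps the default NSFW colour.
    static Verdict classify(double nsfwScore) {
        if (nsfwScore <= 0.2) return Verdict.SAFE;
        if (nsfwScore <= 0.8) return Verdict.BORDERLINE;
        return Verdict.NSFW;
    }
}
```
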

@ -3,6 +3,7 @@
xmlns:tools="http://schemas.android.com/tools" xmlns:tools="http://schemas.android.com/tools"
android:orientation="vertical" android:orientation="vertical"
android:layout_width="match_parent" android:layout_width="match_parent"
android:layout_height="match_parent"> android:layout_height="match_parent">
<LinearLayout android:layout_width="match_parent" <LinearLayout android:layout_width="match_parent"
android:orientation="horizontal" android:orientation="horizontal"

@ -3,8 +3,8 @@
<color name="colorPrimary">#008577</color> <color name="colorPrimary">#008577</color>
<color name="colorPrimaryDark">#00574B</color> <color name="colorPrimaryDark">#00574B</color>
<color name="semi_transparent">#66000000</color> <color name="semi_transparent">#66000000</color>
<color name="nsfw1">#56FF0000</color> <color name="nsfw1">#96CC1B1B</color>
<color name="nsfw2">#20FD9904</color> <color name="nsfw2">#00BCD4</color>
<color name="nsfw3">#FFFFFF</color> <color name="nsfw3">#4CAF50</color>
<color name="colorAccent">#D81B60</color> <color name="colorAccent">#FF0000</color>
</resources> </resources>

@ -1,44 +0,0 @@
# For more information about using CMake with Android Studio, read the
# documentation: https://d.android.com/studio/projects/add-native-code.html
# Sets the minimum version of CMake required to build the native library.
cmake_minimum_required(VERSION 3.4.1)
# Creates and names a library, sets it as either STATIC
# or SHARED, and provides the relative paths to its source code.
# You can define multiple libraries, and CMake builds them for you.
# Gradle automatically packages shared libraries with your APK.
add_library( # Sets the name of the library.
native-lib
# Sets the library as a shared library.
SHARED
# Provides a relative path to your source file(s).
src/main/cpp/native-lib.cpp )
# Searches for a specified prebuilt library and stores the path as a
# variable. Because CMake includes system libraries in the search path by
# default, you only need to specify the name of the public NDK library
# you want to add. CMake verifies that the library exists before
# completing its build.
find_library( # Sets the name of the path variable.
log-lib
# Specifies the name of the NDK library that
# you want CMake to locate.
log )
# Specifies libraries CMake should link to your target library. You
# can link multiple libraries, such as libraries you define in this
# build script, prebuilt third-party libraries, or system libraries.
target_link_libraries( # Specifies the target library.
native-lib
# Links the target library to the log library
# included in the NDK.
${log-lib} )

@ -3,22 +3,12 @@ apply plugin: 'com.android.library'
android { android {
compileSdkVersion 28 compileSdkVersion 28
defaultConfig { defaultConfig {
minSdkVersion 15 minSdkVersion 15
targetSdkVersion 28 targetSdkVersion 28
versionCode 1 versionCode 1
versionName "1.0" versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner" testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
externalNativeBuild {
cmake {
cppFlags "-frtti -fexceptions"
}
}
// ndk {
// abiFilters 'armeabi-v7a'
// }
} }
buildTypes { buildTypes {
@ -27,41 +17,14 @@ android {
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro' proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
} }
} }
externalNativeBuild {
cmake {
path "CMakeLists.txt"
}
}
sourceSets {
main {
jniLibs.srcDirs = ['libs']
assets.srcDir("main/assets")
}
}
} }
dependencies { dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar']) implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation 'com.android.support:appcompat-v7:28.0.0' implementation 'com.android.support:appcompat-v7:28.0.0'
testImplementation 'junit:junit:4.12' testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.2' androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2' androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
implementation 'org.tensorflow:tensorflow-lite:+' implementation 'org.tensorflow:tensorflow-lite:+'
implementation 'org.tensorflow:tensorflow-lite-gpu:+' implementation 'org.tensorflow:tensorflow-lite-gpu:+'
implementation project(path: ':openCVLibrary340') }
}
task nativeLibsToJar(type: Jar, description: 'create a jar archive of the native libs') {
destinationDir file("$buildDir/native-libs")
baseName 'native-libs'
from fileTree(dir: 'libs', include: '**/*.so')
into 'lib/'
}
tasks.withType(JavaCompile) {
compileTask -> compileTask.dependsOn(nativeLibsToJar)
// implementation project(path: ':openCVLibrary340')
}

Binary file not shown.

Binary file not shown.

@ -1,24 +0,0 @@
#include <jni.h>
#include <string>
extern "C"
JNIEXPORT void JNICALL
Java_com_zwy_nsfw_JniLoader_argb2bgr(JNIEnv *env, jobject thiz,
jbyteArray rgbSrc_,
jbyteArray bgrDesc_) {
jbyte *rgbSrc = env->GetByteArrayElements(rgbSrc_, NULL);
jbyte *bgrDesc = env->GetByteArrayElements(bgrDesc_, NULL);
printf("s[]=%s\n","C+++++++++");/*输出数组字符串s[]=Hello,Comrade*/
int wh = env->GetArrayLength(rgbSrc_) / 4 ;
//#pragma omp parallel for
for (int i = 0; i < wh; ++i) {
bgrDesc[i * 3] = rgbSrc[i * 4 + 2]; //B
bgrDesc[i * 3 + 1] = rgbSrc[i * 4 + 1]; //G
bgrDesc[i * 3 + 2] = rgbSrc[i * 4 ]; //R
}
env->ReleaseByteArrayElements(rgbSrc_, rgbSrc, JNI_ABORT);
env->ReleaseByteArrayElements(bgrDesc_, bgrDesc, JNI_COMMIT);
}

@ -3,27 +3,22 @@ package com.zwy.nsfw;
import android.app.Activity; import android.app.Activity;
import android.content.res.AssetFileDescriptor; import android.content.res.AssetFileDescriptor;
import android.graphics.Bitmap; import android.graphics.*;
import android.graphics.Color; import android.os.Environment;
import android.graphics.Matrix;
import android.os.SystemClock; import android.os.SystemClock;
import android.util.Log; import android.util.Log;
import com.zwy.nsfw.api.NsfwBean; import com.zwy.nsfw.api.NsfwBean;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.tensorflow.lite.Interpreter; import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.Tensor; import org.tensorflow.lite.Tensor;
import org.tensorflow.lite.gpu.GpuDelegate; import org.tensorflow.lite.gpu.GpuDelegate;
import java.io.FileInputStream; import java.io.*;
import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
import java.nio.MappedByteBuffer; import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel; import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.List;
import static java.lang.Math.max; import static java.lang.Math.max;
@ -52,7 +47,7 @@ public class Classifier {
* Preallocated buffers for storing image data in. * Preallocated buffers for storing image data in.
*/ */
private int[] intValues = new int[INPUT_WIDTH * INPUT_HEIGHT]; private int[] intValues = new int[INPUT_WIDTH * INPUT_HEIGHT];
List<Integer> list = new ArrayList<>();
/** /**
* The loaded TensorFlow Lite model. * The loaded TensorFlow Lite model.
*/ */
@ -98,7 +93,7 @@ public class Classifier {
Tensor tensor = tflite.getInputTensor(tflite.getInputIndex("input")); Tensor tensor = tflite.getInputTensor(tflite.getInputIndex("input"));
String stringBuilder = " \n" String stringBuilder = " \n"
+"dataType : " + + "dataType : " +
tensor.dataType() + tensor.dataType() +
"\n" + "\n" +
"numBytes : " + "numBytes : " +
@ -123,11 +118,6 @@ public class Classifier {
* BYTES_PER_CHANNEL_NUM); * BYTES_PER_CHANNEL_NUM);
imgData.order(ByteOrder.LITTLE_ENDIAN); imgData.order(ByteOrder.LITTLE_ENDIAN);
if (OpenCVLoader.initDebug()) {
Log.d(TAG, "OpenCv Initialization Success.");
} else {
Log.e(TAG, "OpenCv Initialization Error.");
}
Log.d(TAG, "Tensorflow Lite Image Classifier Initialization Success."); Log.d(TAG, "Tensorflow Lite Image Classifier Initialization Success.");
} }
@ -152,9 +142,6 @@ public class Classifier {
return; return;
} }
imgData.rewind(); imgData.rewind();
Matrix m = new Matrix();
m.setScale(-1, 1);//horizontal flip
Bitmap reversePic = Bitmap.createBitmap(bitmap_, 0, 0, bitmap_.getWidth(), bitmap_.getHeight(), m, true);
int W = bitmap_.getWidth(); int W = bitmap_.getWidth();
int H = bitmap_.getHeight(); int H = bitmap_.getHeight();
@ -162,25 +149,22 @@ public class Classifier {
int h_off = max((H - INPUT_HEIGHT) / 2, 0); int h_off = max((H - INPUT_HEIGHT) / 2, 0);
//convert each pixel's colour value to an int and store it in intValues //convert each pixel's colour value to an int and store it in intValues
reversePic.getPixels(intValues, 0, INPUT_WIDTH, h_off, w_off, INPUT_WIDTH, INPUT_HEIGHT); bitmap_.getPixels(intValues, 0, INPUT_WIDTH, h_off, w_off, INPUT_WIDTH, INPUT_HEIGHT);
// Convert the image to floating point. // Convert the image to floating point.
int pixel = 0;
long startTime = SystemClock.uptimeMillis(); long startTime = SystemClock.uptimeMillis();
for (int i = h_off; i < h_off + INPUT_HEIGHT; ++i) { for (int i = 0; i < intValues.length; i++) {
for (int j = w_off; j < w_off + INPUT_WIDTH; ++j) { final int color = intValues[i];
final int color = intValues[pixel++]; int r1 = Color.red(color);
int r1 = Color.red(color); int g1 = Color.green(color);
int g1 = Color.green(color); int b1 = Color.blue(color);
int b1 = Color.blue(color);
int rr1 = r1 - 123;
int rr1 = r1 - 123; int gg1 = g1 - 117;
int gg1 = g1 - 117; int bb1 = b1 - 104;
int bb1 = b1 - 104;
imgData.putFloat(bb1);
imgData.putFloat(bb1); imgData.putFloat(gg1);
imgData.putFloat(gg1); imgData.putFloat(rr1);
imgData.putFloat(rr1);
}
} }
long endTime = SystemClock.uptimeMillis(); long endTime = SystemClock.uptimeMillis();
Log.d(TAG, "Timecost to put values into ByteBuffer: " + (endTime - startTime) + "ms"); Log.d(TAG, "Timecost to put values into ByteBuffer: " + (endTime - startTime) + "ms");
@ -188,29 +172,67 @@ public class Classifier {
public NsfwBean run(Bitmap bitmap) { public NsfwBean run(Bitmap bitmap) {
Mat mat = new Mat(); Bitmap bitmap_256 = getResizedBitmap(bitmap, 256, 256);
//add alpha
Utils.bitmapToMat(bitmap.copy(Bitmap.Config.ARGB_8888, false), mat, true); saveBitmapFile(bitmap_256);
Mat mat1 = new Mat();
//bilinearly resample the original bitmap to 256x256
Imgproc.resize(mat, mat1, new Size(256, 256), 0, 0, Imgproc.INTER_LINEAR);
//add alpha
Bitmap bitmap_256 = Bitmap.createBitmap(256, 256, Bitmap.Config.ARGB_8888);
//convert
Utils.matToBitmap(mat1, bitmap_256);
//Writes image data into byteBuffer //Writes image data into byteBuffer
convertBitmapToByteBuffer(bitmap_256); convertBitmapToByteBuffer(bitmap_256);
long startTime = SystemClock.uptimeMillis(); long startTime = SystemClock.uptimeMillis();
// out // out
float[][] outArray = new float[1][2]; float[][] outArray = new float[1][2];
Log.d(TAG, "lastImgData : " + imgData);
tflite.run(imgData, outArray); tflite.run(imgData, outArray);
long endTime = SystemClock.uptimeMillis(); long endTime = SystemClock.uptimeMillis();
Log.d(TAG, "SFW score :" + outArray[0][0] + ",NSFW score :" + outArray[0][1]);
Log.d(TAG, "Timecost to run model inference: " + (endTime - startTime) + "ms"); Log.d(TAG, "Timecost to run model inference: " + (endTime - startTime) + "ms");
return new NsfwBean(outArray[0][0], outArray[0][1]); return new NsfwBean(outArray[0][0], outArray[0][1]);
} }
public static Bitmap getResizedBitmap(Bitmap bitmap, float newWidth, float newHeight) {
// if (bitmap.getHeight()>bitmap.getWidth()){
// newHeight=300f;
// newWidth= (int) (bitmap.getWidth()*(newHeight/(float) bitmap.getHeight()));
// }else{
// newWidth=300f;
// newHeight= (int) (bitmap.getHeight()*(newWidth/(float) bitmap.getWidth()));
// }
Bitmap resizedBitmap = Bitmap.createBitmap((int) newWidth, (int) newHeight, Bitmap.Config.ARGB_8888);
float scaleX = newWidth / (float) bitmap.getWidth();
float scaleY = newHeight / (float) bitmap.getHeight();
float pivotX = 0;
float pivotY = 0;
Matrix scaleMatrix = new Matrix();
scaleMatrix.setScale(scaleX, scaleY, pivotX, pivotY);
Canvas canvas = new Canvas(resizedBitmap);
canvas.setMatrix(scaleMatrix);
canvas.drawBitmap(bitmap, 0, 0, new Paint(Paint.FILTER_BITMAP_FLAG |
Paint.DITHER_FLAG |
Paint.ANTI_ALIAS_FLAG));
return resizedBitmap;
}
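
The new `getResizedBitmap` above scales with a Matrix-driven Canvas draw using a filtering, dithering, anti-aliasing Paint. As a hedged alternative sketch, `Bitmap.createScaledBitmap` performs a comparable filtered scale in one call, if exact parity with the helper is not required:

```
import android.graphics.Bitmap;

final class ResizeSketch {
    // Built-in alternative to the committed getResizedBitmap(bitmap, 256, 256);
    // filter = true enables filtered (bilinear-style) scaling.
    static Bitmap resizeForModel(Bitmap source) {
        return Bitmap.createScaledBitmap(source, 256, 256, true);
    }
}
```
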
public void saveBitmapFile(Bitmap bitmap) {
String fp = Environment.getExternalStorageDirectory().getAbsolutePath() + "/333333333.bmp";
File file = new File(fp);//path where the image will be saved
try {
BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(file));
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
bos.flush();
bos.close();
} catch (IOException e) {
e.printStackTrace();
}
}
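
`saveBitmapFile` above writes the resized input to a fixed path under `Environment.getExternalStorageDirectory()` with a `.bmp` name while compressing as JPEG, which is also why MainActivity now requests WRITE_EXTERNAL_STORAGE. A hedged sketch of a variant that avoids the runtime permission by using app-private storage and matches the extension to the format (file name hypothetical):

```
import android.content.Context;
import android.graphics.Bitmap;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

final class DebugDumpSketch {
    // App-private files under getFilesDir() need no WRITE_EXTERNAL_STORAGE permission.
    static void save(Context context, Bitmap bitmap) {
        File file = new File(context.getFilesDir(), "nsfw_input_debug.png"); // hypothetical name
        try (FileOutputStream out = new FileOutputStream(file)) {
            bitmap.compress(Bitmap.CompressFormat.PNG, 100, out);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
```
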
/** /**
* Closes the interpreter and model to release resources. * Closes the interpreter and model to release resources.
@ -219,15 +241,15 @@ public class Classifier {
if (tflite != null) { if (tflite != null) {
tflite.close(); tflite.close();
tflite = null; tflite = null;
Log.d(TAG,"Tensorflow Lite Image Classifier close."); Log.d(TAG, "Tensorflow Lite Image Classifier close.");
} }
if (gpuDelegate != null) { if (gpuDelegate != null) {
gpuDelegate.close(); gpuDelegate.close();
Log.d(TAG,"Tensorflow Lite Image gpuDelegate close."); Log.d(TAG, "Tensorflow Lite Image gpuDelegate close.");
gpuDelegate = null; gpuDelegate = null;
} }
tfliteModel = null; tfliteModel = null;
Log.d(TAG,"Tensorflow Lite destroyed."); Log.d(TAG, "Tensorflow Lite destroyed.");
} }
} }

@ -1,10 +0,0 @@
package com.zwy.nsfw;
public class JniLoader {
static {
System.loadLibrary("native-lib");
}
public static native void argb2bgr(byte[] rgbSrc,byte[] bgrDesc);
}

@ -1,17 +0,0 @@
apply plugin: 'com.android.library'
android {
compileSdkVersion 28
defaultConfig {
minSdkVersion 15
targetSdkVersion 28
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}
}

@ -1,199 +0,0 @@
/*
* This file is auto-generated. DO NOT MODIFY.
* Original file: /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/aidl/org/opencv/engine/OpenCVEngineInterface.aidl
*/
package org.opencv.engine;
/**
* Class provides a Java interface for OpenCV Engine Service. It's synchronous with native OpenCVEngine class.
*/
public interface OpenCVEngineInterface extends android.os.IInterface
{
/** Local-side IPC implementation stub class. */
public static abstract class Stub extends android.os.Binder implements org.opencv.engine.OpenCVEngineInterface
{
private static final java.lang.String DESCRIPTOR = "org.opencv.engine.OpenCVEngineInterface";
/** Construct the stub at attach it to the interface. */
public Stub()
{
this.attachInterface(this, DESCRIPTOR);
}
/**
* Cast an IBinder object into an org.opencv.engine.OpenCVEngineInterface interface,
* generating a proxy if needed.
*/
public static org.opencv.engine.OpenCVEngineInterface asInterface(android.os.IBinder obj)
{
if ((obj==null)) {
return null;
}
android.os.IInterface iin = obj.queryLocalInterface(DESCRIPTOR);
if (((iin!=null)&&(iin instanceof org.opencv.engine.OpenCVEngineInterface))) {
return ((org.opencv.engine.OpenCVEngineInterface)iin);
}
return new org.opencv.engine.OpenCVEngineInterface.Stub.Proxy(obj);
}
@Override public android.os.IBinder asBinder()
{
return this;
}
@Override public boolean onTransact(int code, android.os.Parcel data, android.os.Parcel reply, int flags) throws android.os.RemoteException
{
java.lang.String descriptor = DESCRIPTOR;
switch (code)
{
case INTERFACE_TRANSACTION:
{
reply.writeString(descriptor);
return true;
}
case TRANSACTION_getEngineVersion:
{
data.enforceInterface(descriptor);
int _result = this.getEngineVersion();
reply.writeNoException();
reply.writeInt(_result);
return true;
}
case TRANSACTION_getLibPathByVersion:
{
data.enforceInterface(descriptor);
java.lang.String _arg0;
_arg0 = data.readString();
java.lang.String _result = this.getLibPathByVersion(_arg0);
reply.writeNoException();
reply.writeString(_result);
return true;
}
case TRANSACTION_installVersion:
{
data.enforceInterface(descriptor);
java.lang.String _arg0;
_arg0 = data.readString();
boolean _result = this.installVersion(_arg0);
reply.writeNoException();
reply.writeInt(((_result)?(1):(0)));
return true;
}
case TRANSACTION_getLibraryList:
{
data.enforceInterface(descriptor);
java.lang.String _arg0;
_arg0 = data.readString();
java.lang.String _result = this.getLibraryList(_arg0);
reply.writeNoException();
reply.writeString(_result);
return true;
}
default:
{
return super.onTransact(code, data, reply, flags);
}
}
}
private static class Proxy implements org.opencv.engine.OpenCVEngineInterface
{
private android.os.IBinder mRemote;
Proxy(android.os.IBinder remote)
{
mRemote = remote;
}
@Override public android.os.IBinder asBinder()
{
return mRemote;
}
public java.lang.String getInterfaceDescriptor()
{
return DESCRIPTOR;
}
@Override public int getEngineVersion() throws android.os.RemoteException
{
android.os.Parcel _data = android.os.Parcel.obtain();
android.os.Parcel _reply = android.os.Parcel.obtain();
int _result;
try {
_data.writeInterfaceToken(DESCRIPTOR);
mRemote.transact(Stub.TRANSACTION_getEngineVersion, _data, _reply, 0);
_reply.readException();
_result = _reply.readInt();
}
finally {
_reply.recycle();
_data.recycle();
}
return _result;
}
@Override public java.lang.String getLibPathByVersion(java.lang.String version) throws android.os.RemoteException
{
android.os.Parcel _data = android.os.Parcel.obtain();
android.os.Parcel _reply = android.os.Parcel.obtain();
java.lang.String _result;
try {
_data.writeInterfaceToken(DESCRIPTOR);
_data.writeString(version);
mRemote.transact(Stub.TRANSACTION_getLibPathByVersion, _data, _reply, 0);
_reply.readException();
_result = _reply.readString();
}
finally {
_reply.recycle();
_data.recycle();
}
return _result;
}
/**
* Tries to install defined version of OpenCV from Google Play Market.
* @param OpenCV version.
* @return Returns true if installation was successful or OpenCV package has been already installed.
*/
@Override public boolean installVersion(java.lang.String version) throws android.os.RemoteException
{
android.os.Parcel _data = android.os.Parcel.obtain();
android.os.Parcel _reply = android.os.Parcel.obtain();
boolean _result;
try {
_data.writeInterfaceToken(DESCRIPTOR);
_data.writeString(version);
mRemote.transact(Stub.TRANSACTION_installVersion, _data, _reply, 0);
_reply.readException();
_result = (0!=_reply.readInt());
}
finally {
_reply.recycle();
_data.recycle();
}
return _result;
}
@Override public java.lang.String getLibraryList(java.lang.String version) throws android.os.RemoteException
{
android.os.Parcel _data = android.os.Parcel.obtain();
android.os.Parcel _reply = android.os.Parcel.obtain();
java.lang.String _result;
try {
_data.writeInterfaceToken(DESCRIPTOR);
_data.writeString(version);
mRemote.transact(Stub.TRANSACTION_getLibraryList, _data, _reply, 0);
_reply.readException();
_result = _reply.readString();
}
finally {
_reply.recycle();
_data.recycle();
}
return _result;
}
}
static final int TRANSACTION_getEngineVersion = (android.os.IBinder.FIRST_CALL_TRANSACTION + 0);
static final int TRANSACTION_getLibPathByVersion = (android.os.IBinder.FIRST_CALL_TRANSACTION + 1);
static final int TRANSACTION_installVersion = (android.os.IBinder.FIRST_CALL_TRANSACTION + 2);
static final int TRANSACTION_getLibraryList = (android.os.IBinder.FIRST_CALL_TRANSACTION + 3);
}
public int getEngineVersion() throws android.os.RemoteException;
public java.lang.String getLibPathByVersion(java.lang.String version) throws android.os.RemoteException;
/**
* Tries to install defined version of OpenCV from Google Play Market.
* @param OpenCV version.
* @return Returns true if installation was successful or OpenCV package has been already installed.
*/
public boolean installVersion(java.lang.String version) throws android.os.RemoteException;
public java.lang.String getLibraryList(java.lang.String version) throws android.os.RemoteException;
}

@ -1,13 +0,0 @@
/**
* Automatically generated file. DO NOT MODIFY
*/
package org.opencv;
public final class BuildConfig {
public static final boolean DEBUG = Boolean.parseBoolean("true");
public static final String APPLICATION_ID = "org.opencv";
public static final String BUILD_TYPE = "debug";
public static final String FLAVOR = "";
public static final int VERSION_CODE = -1;
public static final String VERSION_NAME = "";
}

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv" >
<uses-sdk
android:minSdkVersion="15"
android:targetSdkVersion="28" />
</manifest>

@ -1 +0,0 @@
[{"outputType":{"type":"AAPT_FRIENDLY_MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":-1,"enabled":true,"outputFile":"openCVLibrary340-debug.aar","fullName":"debug","baseName":"debug"},"path":"AndroidManifest.xml","properties":{"packageId":"org.opencv","split":""}}]

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv" >
<uses-sdk
android:minSdkVersion="15"
android:targetSdkVersion="28" />
</manifest>

@ -1 +0,0 @@
[{"outputType":{"type":"MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":-1,"enabled":true,"outputFile":"openCVLibrary340-debug.aar","fullName":"debug","baseName":"debug"},"path":"../../library_manifest/debug/AndroidManifest.xml","properties":{"packageId":"org.opencv","split":""}}]

@ -1,25 +0,0 @@
-- Merging decision tree log ---
manifest
ADDED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml:2:1-5:12
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml:2:1-5:12
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml:2:1-5:12
package
ADDED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml:3:7-27
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
xmlns:android
ADDED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml:2:11-69
uses-sdk
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml reason: use-sdk injection requested
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
android:targetSdkVersion
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
ADDED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
android:minSdkVersion
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
ADDED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml
INJECTED from /Users/jason/AndroidStudioProjects/open_nsfw_android/openCVLibrary340/src/main/AndroidManifest.xml

@ -1,9 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<lint>
<issue id="InlinedApi">
<ignore path="src\org\opencv\android\JavaCameraView.java" />
</issue>
<issue id="NewApi">
<ignore path="src\org\opencv\android\JavaCameraView.java" />
</issue>
</lint>

@ -1,5 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv">
</manifest>

@ -1,33 +0,0 @@
package org.opencv.engine;
/**
* Class provides a Java interface for OpenCV Engine Service. It's synchronous with native OpenCVEngine class.
*/
interface OpenCVEngineInterface
{
/**
* @return Returns service version.
*/
int getEngineVersion();
/**
* Finds an installed OpenCV library.
* @param OpenCV version.
* @return Returns path to OpenCV native libs or an empty string if OpenCV can not be found.
*/
String getLibPathByVersion(String version);
/**
* Tries to install defined version of OpenCV from Google Play Market.
* @param OpenCV version.
* @return Returns true if installation was successful or OpenCV package has been already installed.
*/
boolean installVersion(String version);
/**
* Returns list of libraries in loading order, separated by semicolon.
* @param OpenCV version.
* @return Returns names of OpenCV libraries, separated by semicolon.
*/
String getLibraryList(String version);
}

@ -1,391 +0,0 @@
package org.opencv.android;
import java.io.File;
import java.util.StringTokenizer;
import org.opencv.core.Core;
import org.opencv.engine.OpenCVEngineInterface;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.net.Uri;
import android.os.IBinder;
import android.os.RemoteException;
import android.util.Log;
class AsyncServiceHelper
{
public static boolean initOpenCV(String Version, final Context AppContext,
final LoaderCallbackInterface Callback)
{
AsyncServiceHelper helper = new AsyncServiceHelper(Version, AppContext, Callback);
Intent intent = new Intent("org.opencv.engine.BIND");
intent.setPackage("org.opencv.engine");
if (AppContext.bindService(intent, helper.mServiceConnection, Context.BIND_AUTO_CREATE))
{
return true;
}
else
{
AppContext.unbindService(helper.mServiceConnection);
InstallService(AppContext, Callback);
return false;
}
}
protected AsyncServiceHelper(String Version, Context AppContext, LoaderCallbackInterface Callback)
{
mOpenCVersion = Version;
mUserAppCallback = Callback;
mAppContext = AppContext;
}
protected static final String TAG = "OpenCVManager/Helper";
protected static final int MINIMUM_ENGINE_VERSION = 2;
protected OpenCVEngineInterface mEngineService;
protected LoaderCallbackInterface mUserAppCallback;
protected String mOpenCVersion;
protected Context mAppContext;
protected static boolean mServiceInstallationProgress = false;
protected static boolean mLibraryInstallationProgress = false;
protected static boolean InstallServiceQuiet(Context context)
{
boolean result = true;
try
{
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(OPEN_CV_SERVICE_URL));
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
catch(Exception e)
{
result = false;
}
return result;
}
protected static void InstallService(final Context AppContext, final LoaderCallbackInterface Callback)
{
if (!mServiceInstallationProgress)
{
Log.d(TAG, "Request new service installation");
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV Manager via Google Play");
boolean result = InstallServiceQuiet(AppContext);
if (result)
{
mServiceInstallationProgress = true;
Log.d(TAG, "Package installation started");
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
int Status = LoaderCallbackInterface.MARKET_ERROR;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Unbind from service");
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
Log.e(TAG, "Instalation was not started! Nothing to wait!");
}
};
Callback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
Log.d(TAG, "Waiting current installation process");
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install()
{
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "Waiting for OpenCV canceled by user");
mServiceInstallationProgress = false;
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
InstallServiceQuiet(AppContext);
}
};
Callback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
}
/**
* URL of OpenCV Manager page on Google Play Market.
*/
protected static final String OPEN_CV_SERVICE_URL = "market://details?id=org.opencv.engine";
protected ServiceConnection mServiceConnection = new ServiceConnection()
{
public void onServiceConnected(ComponentName className, IBinder service)
{
Log.d(TAG, "Service connection created");
mEngineService = OpenCVEngineInterface.Stub.asInterface(service);
if (null == mEngineService)
{
Log.d(TAG, "OpenCV Manager Service connection fails. May be service was not installed?");
InstallService(mAppContext, mUserAppCallback);
}
else
{
mServiceInstallationProgress = false;
try
{
if (mEngineService.getEngineVersion() < MINIMUM_ENGINE_VERSION)
{
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
return;
}
Log.d(TAG, "Trying to get library path");
String path = mEngineService.getLibPathByVersion(mOpenCVersion);
if ((null == path) || (path.length() == 0))
{
if (!mLibraryInstallationProgress)
{
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV lib via Google Play");
try
{
if (mEngineService.installVersion(mOpenCVersion))
{
mLibraryInstallationProgress = true;
Log.d(TAG, "Package installation started");
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
} catch (RemoteException e) {
e.printStackTrace();;
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
public void cancel() {
Log.d(TAG, "OpenCV library installation was canceled");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.e(TAG, "Installation was not started! Nothing to wait!");
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
mLibraryInstallationProgress = false;
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.d(TAG, "Waiting for current installation");
try
{
if (!mEngineService.installVersion(mOpenCVersion))
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
else
{
Log.d(TAG, "Wating for package installation");
}
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
} catch (RemoteException e) {
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
return;
}
else
{
Log.d(TAG, "Trying to get library list");
mLibraryInstallationProgress = false;
String libs = mEngineService.getLibraryList(mOpenCVersion);
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
int status;
if (initOpenCVLibs(path, libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
status = LoaderCallbackInterface.SUCCESS;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
status = LoaderCallbackInterface.INIT_FAILED;
}
Log.d(TAG, "Init finished with status " + status);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(status);
}
}
catch (RemoteException e)
{
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
}
public void onServiceDisconnected(ComponentName className)
{
mEngineService = null;
}
};
private boolean loadLibrary(String AbsPath)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + AbsPath);
try
{
System.load(AbsPath);
Log.d(TAG, "OpenCV libs init was ok!");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + AbsPath + "\"");
e.printStackTrace();
result &= false;
}
return result;
}
private boolean initOpenCVLibs(String Path, String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
if ((null != Path) && (Path.length() != 0))
{
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
String AbsLibraryPath = Path + File.separator + splitter.nextToken();
result &= loadLibrary(AbsLibraryPath);
}
}
else
{
// If the dependencies list is not defined or empty.
String AbsLibraryPath = Path + File.separator + "libopencv_java3.so";
result &= loadLibrary(AbsLibraryPath);
}
return result;
}
else
{
Log.d(TAG, "Library path \"" + Path + "\" is empty");
return false;
}
}
}

@ -1,141 +0,0 @@
package org.opencv.android;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.util.Log;
/**
* Basic implementation of LoaderCallbackInterface.
*/
public abstract class BaseLoaderCallback implements LoaderCallbackInterface {
public BaseLoaderCallback(Context AppContext) {
mAppContext = AppContext;
}
public void onManagerConnected(int status)
{
switch (status)
{
/** OpenCV initialization was successful. **/
case LoaderCallbackInterface.SUCCESS:
{
/** Application must override this method to handle successful library initialization. **/
} break;
/** OpenCV loader can not start Google Play Market. **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.e(TAG, "Package installation failed!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Package installation failed!");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
} break;
/** Package installation has been canceled. **/
case LoaderCallbackInterface.INSTALL_CANCELED:
{
Log.d(TAG, "OpenCV library installation was canceled by user");
finish();
} break;
/** Application is incompatible with this version of OpenCV Manager. Possibly, a service update is required. **/
case LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION:
{
Log.d(TAG, "OpenCV Manager Service is uncompatible with this app!");
AlertDialog IncomatibilityMessage = new AlertDialog.Builder(mAppContext).create();
IncomatibilityMessage.setTitle("OpenCV Manager");
IncomatibilityMessage.setMessage("OpenCV Manager service is incompatible with this app. Try to update it via Google Play.");
IncomatibilityMessage.setCancelable(false); // This blocks the 'BACK' button
IncomatibilityMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
IncomatibilityMessage.show();
} break;
/** Other status, i.e. INIT_FAILED. **/
default:
{
Log.e(TAG, "OpenCV loading failed!");
AlertDialog InitFailedDialog = new AlertDialog.Builder(mAppContext).create();
InitFailedDialog.setTitle("OpenCV error");
InitFailedDialog.setMessage("OpenCV was not initialised correctly. Application will be shut down");
InitFailedDialog.setCancelable(false); // This blocks the 'BACK' button
InitFailedDialog.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
InitFailedDialog.show();
} break;
}
}
public void onPackageInstall(final int operation, final InstallCallbackInterface callback)
{
switch (operation)
{
case InstallCallbackInterface.NEW_INSTALLATION:
{
AlertDialog InstallMessage = new AlertDialog.Builder(mAppContext).create();
InstallMessage.setTitle("Package not found");
InstallMessage.setMessage(callback.getPackageName() + " package was not found! Try to install it?");
InstallMessage.setCancelable(false); // This blocks the 'BACK' button
InstallMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Yes", new OnClickListener()
{
public void onClick(DialogInterface dialog, int which)
{
callback.install();
}
});
InstallMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "No", new OnClickListener() {
public void onClick(DialogInterface dialog, int which)
{
callback.cancel();
}
});
InstallMessage.show();
} break;
case InstallCallbackInterface.INSTALLATION_PROGRESS:
{
AlertDialog WaitMessage = new AlertDialog.Builder(mAppContext).create();
WaitMessage.setTitle("OpenCV is not ready");
WaitMessage.setMessage("Installation is in progress. Wait or exit?");
WaitMessage.setCancelable(false); // This blocks the 'BACK' button
WaitMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Wait", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.wait_install();
}
});
WaitMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "Exit", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.cancel();
}
});
WaitMessage.show();
} break;
}
}
void finish()
{
((Activity) mAppContext).finish();
}
protected Context mAppContext;
private final static String TAG = "OpenCVLoader/BaseLoaderCallback";
}

@ -1,302 +0,0 @@
package org.opencv.android;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
@TargetApi(21)
public class Camera2Renderer extends CameraGLRendererBase {
protected final String LOGTAG = "Camera2Renderer";
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private String mCameraID;
private Size mPreviewSize = new Size(-1, -1);
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
Camera2Renderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected void doStart() {
Log.d(LOGTAG, "doStart");
startBackgroundThread();
super.doStart();
}
@Override
protected void doStop() {
Log.d(LOGTAG, "doStop");
super.doStop();
stopBackgroundThread();
}
boolean cacPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height);
if(mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) mView.getContext()
.getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager
.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int bestWidth = 0, bestHeight = 0;
float aspect = (float)width / height;
for (Size psize : map.getOutputSizes(SurfaceTexture.class)) {
int w = psize.getWidth(), h = psize.getHeight();
Log.d(LOGTAG, "trying size: "+w+"x"+h);
if ( width >= w && height >= h &&
bestWidth <= w && bestHeight <= h &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight);
if( bestWidth == 0 || bestHeight == 0 ||
mPreviewSize.getWidth() == bestWidth &&
mPreviewSize.getHeight() == bestHeight )
return false;
else {
mPreviewSize = new Size(bestWidth, bestHeight);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "cacPreviewSize - Security Exception");
}
return false;
}
@Override
protected void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
CameraManager manager = (CameraManager) mView.getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String camList[] = manager.getCameraIdList();
if(camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return;
}
if(id == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if( id == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK ||
id == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
mCameraID = cameraID;
break;
}
}
}
if(mCameraID != null) {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException(
"Time out waiting to lock camera opening.");
}
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception");
} catch (InterruptedException e) {
Log.e(LOGTAG, "OpenCamera - Interrupted Exception");
}
}
@Override
protected void closeCamera() {
Log.i(LOGTAG, "closeCamera");
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
mCameraOpenCloseLock.release();
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
};
private void createCameraPreviewSession() {
int w=mPreviewSize.getWidth(), h=mPreviewSize.getHeight();
Log.i(LOGTAG, "createCameraPreviewSession("+w+"x"+h+")");
if(w<0 || h<0)
return;
try {
mCameraOpenCloseLock.acquire();
if (null == mCameraDevice) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
if(null == mSTexture) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null");
return;
}
mSTexture.setDefaultBufferSize(w, h);
Surface surface = new Surface(mSTexture);
mPreviewRequestBuilder = mCameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession cameraCaptureSession) {
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCaptureSession failed");
}
mCameraOpenCloseLock.release();
}
@Override
public void onConfigureFailed(
CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
mCameraOpenCloseLock.release();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession");
} catch (InterruptedException e) {
throw new RuntimeException(
"Interrupted while createCameraPreviewSession", e);
}
finally {
//mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if(mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread");
}
}
@Override
protected void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize("+width+"x"+height+")");
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
try {
mCameraOpenCloseLock.acquire();
boolean needReconfig = cacPreviewSize(width, height);
mCameraWidth = mPreviewSize.getWidth();
mCameraHeight = mPreviewSize.getHeight();
if( !needReconfig ) {
mCameraOpenCloseLock.release();
return;
}
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
mCameraOpenCloseLock.release();
createCameraPreviewSession();
} catch (InterruptedException e) {
mCameraOpenCloseLock.release();
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
}
}
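For reference, the selection rule in cacPreviewSize above boils down to: among the sizes the camera can output, keep the largest one that fits inside the surface and whose aspect ratio is within 0.2 of the requested one. A minimal standalone sketch of that rule (the helper class and method names below are illustrative, not part of the removed file):

```java
import android.util.Size;
import java.util.List;

final class PreviewSizePicker {
    // Same rule as cacPreviewSize: the largest candidate that fits the surface
    // and whose aspect ratio differs from the requested one by less than 0.2.
    static Size pick(List<Size> candidates, int surfaceWidth, int surfaceHeight) {
        float targetAspect = (float) surfaceWidth / surfaceHeight;
        int bestWidth = 0, bestHeight = 0;
        for (Size s : candidates) {
            int w = s.getWidth(), h = s.getHeight();
            if (w <= surfaceWidth && h <= surfaceHeight
                    && w >= bestWidth && h >= bestHeight
                    && Math.abs(targetAspect - (float) w / h) < 0.2f) {
                bestWidth = w;
                bestHeight = h;
            }
        }
        return new Size(bestWidth, bestHeight); // (0, 0) means no candidate matched
    }
}
```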

@ -1,495 +0,0 @@
package org.opencv.android;
import java.util.List;
import org.opencv.BuildConfig;
import org.opencv.R;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
* This is a basic class, implementing the interaction with Camera and OpenCV library.
 * Its main responsibility is to control when the camera can be enabled, process the frame,
 * call the external listener to make any adjustments to the frame and then draw the resulting
* frame to the screen.
* The clients shall implement CvCameraViewListener.
*/
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
private static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private final Object mSyncObject = new Object();
protected int mFrameWidth;
protected int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
 * The returned value is a modified frame that needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
 * The returned value is a modified frame that needs to be displayed on the screen.
* TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB and etc)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStypeListener) {
mOldStyleListener = oldStypeListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
 * This interface is an abstract representation of a single camera frame for the onCameraFrame callback.
 * Attention: Do not use objects that implement this interface outside of the onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* This method returns RGBA Mat with frame
*/
public Mat rgba();
/**
* This method returns single channel gray scale Mat with frame
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
 * The actual onCameraViewStarted callback will be delivered only after both this method has been called and the surface is available
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
 * This method is provided for clients, so they can disable the camera connection and stop
 * the delivery of frames even though the surface view itself is not destroyed and still stays on the screen
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
*
* @param listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
 * This method sets the maximum size that a camera frame is allowed to be. When selecting
 * a size, the biggest size that is less than or equal to the set maximum will be chosen.
 * As an example, if we call setMaxFrameSize(200,200) and the supported sizes are 176x152 and 320x240,
 * the preview frame will be selected with the 176x152 size.
 * This method is useful when the size of the preview frame needs to be restricted for some reason (for example, for video recording)
* @param maxWidth - the maximum width allowed for camera frame.
* @param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
 * This method shall be called by the subclasses when they have a valid
 * frame object and want it to be delivered to the external client (via callback) and
 * then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas = getHolder().lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
if (BuildConfig.DEBUG)
Log.d(TAG, "mStretch value: " + mScale);
if (mScale != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale*mCacheBitmap.getWidth()) / 2 + mScale*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale*mCacheBitmap.getHeight()) / 2 + mScale*mCacheBitmap.getHeight())), null);
} else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
/**
 * This method, when invoked, shall perform the concrete operations needed to initialize the camera.
 * CONTRACT: as a result of this method the variables mFrameWidth and mFrameHeight MUST be
 * initialized with the size of the camera frames that will be delivered to the external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
 * Disconnects and releases the particular camera object connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTexture constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select camera preview size.
 * It goes over the list of the supported preview sizes and selects the largest one that
 * fits both the values set via setMaxFrameSize() and the surface frame allocated for this view
* @param supportedSizes
* @param surfaceWidth
* @param surfaceHeight
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}
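The client contract documented above (implement CvCameraViewListener2, then enableView() once the view may use the camera) would typically look like the sketch below. This is a hypothetical usage example, not code from the repository; the activity name, layout resource and view id are assumptions.

```java
import android.app.Activity;
import android.os.Bundle;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCameraView;
import org.opencv.core.Mat;

public class PreviewActivity extends Activity
        implements CameraBridgeViewBase.CvCameraViewListener2 {

    private JavaCameraView mCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Assumed layout containing a JavaCameraView with id "camera_view".
        setContentView(R.layout.activity_preview);
        mCameraView = (JavaCameraView) findViewById(R.id.camera_view);
        mCameraView.setCvCameraViewListener(this);
    }

    @Override
    protected void onResume() {
        super.onResume();
        mCameraView.enableView(); // frames start once the surface exists
    }

    @Override
    protected void onPause() {
        mCameraView.disableView();
        super.onPause();
    }

    @Override
    public void onCameraViewStarted(int width, int height) { }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        return inputFrame.rgba(); // return the frame unchanged for display
    }
}
```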

@ -1,440 +0,0 @@
package org.opencv.android;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.View;
@TargetApi(15)
public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
protected final String LOGTAG = "CameraGLRendererBase";
// shaders
private final String vss = ""
+ "attribute vec2 vPosition;\n"
+ "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n"
+ "void main() {\n" + " texCoord = vTexCoord;\n"
+ " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n"
+ "}";
private final String fssOES = ""
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
private final String fss2D = ""
+ "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
// coord-s
private final float vertices[] = {
-1, -1,
-1, 1,
1, -1,
1, 1 };
private final float texCoordOES[] = {
0, 1,
0, 0,
1, 1,
1, 0 };
private final float texCoord2D[] = {
0, 0,
0, 1,
1, 0,
1, 1 };
private int[] texCamera = {0}, texFBO = {0}, texDraw = {0};
private int[] FBO = {0};
private int progOES = -1, prog2D = -1;
private int vPosOES, vTCOES, vPos2D, vTC2D;
private FloatBuffer vert, texOES, tex2D;
protected int mCameraWidth = -1, mCameraHeight = -1;
protected int mFBOWidth = -1, mFBOHeight = -1;
protected int mMaxCameraWidth = -1, mMaxCameraHeight = -1;
protected int mCameraIndex = CameraBridgeViewBase.CAMERA_ID_ANY;
protected SurfaceTexture mSTexture;
protected boolean mHaveSurface = false;
protected boolean mHaveFBO = false;
protected boolean mUpdateST = false;
protected boolean mEnabled = true;
protected boolean mIsStarted = false;
protected CameraGLSurfaceView mView;
protected abstract void openCamera(int id);
protected abstract void closeCamera();
protected abstract void setCameraPreviewSize(int width, int height); // updates mCameraWidth & mCameraHeight
public CameraGLRendererBase(CameraGLSurfaceView view) {
mView = view;
int bytes = vertices.length * Float.SIZE / Byte.SIZE;
vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
vert.put(vertices).position(0);
texOES.put(texCoordOES).position(0);
tex2D.put(texCoord2D).position(0);
}
@Override
public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
//Log.i(LOGTAG, "onFrameAvailable");
mUpdateST = true;
mView.requestRender();
}
@Override
public void onDrawFrame(GL10 gl) {
//Log.i(LOGTAG, "onDrawFrame start");
if (!mHaveFBO)
return;
synchronized(this) {
if (mUpdateST) {
mSTexture.updateTexImage();
mUpdateST = false;
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
CameraTextureListener texListener = mView.getCameraTextureListener();
if(texListener != null) {
//Log.d(LOGTAG, "haveUserCallback");
// texCamera(OES) -> texFBO
drawTex(texCamera[0], true, FBO[0]);
// call user code (texFBO -> texDraw)
boolean modified = texListener.onCameraTexture(texFBO[0], texDraw[0], mCameraWidth, mCameraHeight);
if(modified) {
// texDraw -> screen
drawTex(texDraw[0], false, 0);
} else {
// texFBO -> screen
drawTex(texFBO[0], false, 0);
}
} else {
Log.d(LOGTAG, "texCamera(OES) -> screen");
// texCamera(OES) -> screen
drawTex(texCamera[0], true, 0);
}
//Log.i(LOGTAG, "onDrawFrame end");
}
}
@Override
public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) {
Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")");
mHaveSurface = true;
updateState();
setPreviewSize(surfaceWidth, surfaceHeight);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.i(LOGTAG, "onSurfaceCreated");
initShaders();
}
private void initShaders() {
String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION);
if (strGLVersion != null)
Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion);
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
progOES = loadShader(vss, fssOES);
vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition");
vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPosOES);
GLES20.glEnableVertexAttribArray(vTCOES);
prog2D = loadShader(vss, fss2D);
vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition");
vTC2D = GLES20.glGetAttribLocation(prog2D, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPos2D);
GLES20.glEnableVertexAttribArray(vTC2D);
}
private void initSurfaceTexture() {
Log.d(LOGTAG, "initSurfaceTexture");
deleteSurfaceTexture();
initTexOES(texCamera);
mSTexture = new SurfaceTexture(texCamera[0]);
mSTexture.setOnFrameAvailableListener(this);
}
private void deleteSurfaceTexture() {
Log.d(LOGTAG, "deleteSurfaceTexture");
if(mSTexture != null) {
mSTexture.release();
mSTexture = null;
deleteTex(texCamera);
}
}
private void initTexOES(int[] tex) {
if(tex.length == 1) {
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
}
private static void deleteTex(int[] tex) {
if(tex.length == 1) {
GLES20.glDeleteTextures(1, tex, 0);
}
}
private static int loadShader(String vss, String fss) {
Log.d("CameraGLRendererBase", "loadShader");
int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vshader, vss);
GLES20.glCompileShader(vshader);
int[] status = new int[1];
GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile vertex shader: "+GLES20.glGetShaderInfoLog(vshader));
GLES20.glDeleteShader(vshader);
vshader = 0;
return 0;
}
int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fshader, fss);
GLES20.glCompileShader(fshader);
GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader));
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
fshader = 0;
return 0;
}
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vshader);
GLES20.glAttachShader(program, fshader);
GLES20.glLinkProgram(program);
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not link shader program: "+GLES20.glGetProgramInfoLog(program));
program = 0;
return 0;
}
GLES20.glValidateProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0);
if (status[0] == 0)
{
Log.e("CameraGLRendererBase", "Shader program validation error: "+GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
return 0;
}
Log.d("CameraGLRendererBase", "Shader program is built OK");
return program;
}
private void deleteFBO()
{
Log.d(LOGTAG, "deleteFBO("+mFBOWidth+"x"+mFBOHeight+")");
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glDeleteFramebuffers(1, FBO, 0);
deleteTex(texFBO);
deleteTex(texDraw);
mFBOWidth = mFBOHeight = 0;
}
private void initFBO(int width, int height)
{
Log.d(LOGTAG, "initFBO("+width+"x"+height+")");
deleteFBO();
GLES20.glGenTextures(1, texDraw, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glGenTextures(1, texFBO, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
//int hFBO;
GLES20.glGenFramebuffers(1, FBO, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0);
Log.d(LOGTAG, "initFBO error status: " + GLES20.glGetError());
int FBOstatus = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (FBOstatus != GLES20.GL_FRAMEBUFFER_COMPLETE)
Log.e(LOGTAG, "initFBO failed, status: " + FBOstatus);
mFBOWidth = width;
mFBOHeight = height;
}
// draw texture to FBO or to screen if fbo == 0
private void drawTex(int tex, boolean isOES, int fbo)
{
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
if(fbo == 0)
GLES20.glViewport(0, 0, mView.getWidth(), mView.getHeight());
else
GLES20.glViewport(0, 0, mFBOWidth, mFBOHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if(isOES) {
GLES20.glUseProgram(progOES);
GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES);
} else {
GLES20.glUseProgram(prog2D);
GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D);
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
if(isOES) {
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0);
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glFlush();
}
public synchronized void enableView() {
Log.d(LOGTAG, "enableView");
mEnabled = true;
updateState();
}
public synchronized void disableView() {
Log.d(LOGTAG, "disableView");
mEnabled = false;
updateState();
}
protected void updateState() {
Log.d(LOGTAG, "updateState");
Log.d(LOGTAG, "mEnabled="+mEnabled+", mHaveSurface="+mHaveSurface);
boolean willStart = mEnabled && mHaveSurface && mView.getVisibility() == View.VISIBLE;
if (willStart != mIsStarted) {
if(willStart) doStart();
else doStop();
} else {
Log.d(LOGTAG, "keeping State unchanged");
}
Log.d(LOGTAG, "updateState end");
}
protected synchronized void doStart() {
Log.d(LOGTAG, "doStart");
initSurfaceTexture();
openCamera(mCameraIndex);
mIsStarted = true;
if(mCameraWidth>0 && mCameraHeight>0)
setPreviewSize(mCameraWidth, mCameraHeight); // start preview and call listener.onCameraViewStarted()
}
protected void doStop() {
Log.d(LOGTAG, "doStop");
synchronized(this) {
mUpdateST = false;
mIsStarted = false;
mHaveFBO = false;
closeCamera();
deleteSurfaceTexture();
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStopped();
}
protected void setPreviewSize(int width, int height) {
synchronized(this) {
mHaveFBO = false;
mCameraWidth = width;
mCameraHeight = height;
setCameraPreviewSize(width, height); // can change mCameraWidth & mCameraHeight
initFBO(mCameraWidth, mCameraHeight);
mHaveFBO = true;
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStarted(mCameraWidth, mCameraHeight);
}
public void setCameraIndex(int cameraIndex) {
disableView();
mCameraIndex = cameraIndex;
enableView();
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
disableView();
mMaxCameraWidth = maxWidth;
mMaxCameraHeight = maxHeight;
enableView();
}
public void onResume() {
Log.i(LOGTAG, "onResume");
}
public void onPause() {
Log.i(LOGTAG, "onPause");
mHaveSurface = false;
updateState();
mCameraWidth = mCameraHeight = -1;
}
}

@ -1,119 +0,0 @@
package org.opencv.android;
import org.opencv.R;
import android.content.Context;
import android.content.res.TypedArray;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
public class CameraGLSurfaceView extends GLSurfaceView {
private static final String LOGTAG = "CameraGLSurfaceView";
public interface CameraTextureListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
 * the frames will start to be delivered to the client via the onCameraTexture() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
 * No frames will be delivered via the onCameraTexture() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when a new preview frame from Camera is ready.
* @param texIn - the OpenGL texture ID that contains frame in RGBA format
 * @param texOut - the OpenGL texture ID that can be used to store the modified frame image to display
* @param width - the width of the frame
* @param height - the height of the frame
* @return `true` if `texOut` should be displayed, `false` - to show `texIn`
*/
public boolean onCameraTexture(int texIn, int texOut, int width, int height);
};
private CameraTextureListener mTexListener;
private CameraGLRendererBase mRenderer;
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
int cameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
styledAttrs.recycle();
if(android.os.Build.VERSION.SDK_INT >= 21)
mRenderer = new Camera2Renderer(this);
else
mRenderer = new CameraRenderer(this);
setCameraIndex(cameraIndex);
setEGLContextClientVersion(2);
setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public void setCameraTextureListener(CameraTextureListener texListener)
{
mTexListener = texListener;
}
public CameraTextureListener getCameraTextureListener()
{
return mTexListener;
}
public void setCameraIndex(int cameraIndex) {
mRenderer.setCameraIndex(cameraIndex);
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
mRenderer.setMaxCameraPreviewSize(maxWidth, maxHeight);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mRenderer.mHaveSurface = false;
super.surfaceDestroyed(holder);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
super.surfaceChanged(holder, format, w, h);
}
@Override
public void onResume() {
Log.i(LOGTAG, "onResume");
super.onResume();
mRenderer.onResume();
}
@Override
public void onPause() {
Log.i(LOGTAG, "onPause");
mRenderer.onPause();
super.onPause();
}
public void enableView() {
mRenderer.enableView();
}
public void disableView() {
mRenderer.disableView();
}
}
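As a usage note for the onCameraTexture contract above, a listener that returns false simply asks the renderer to display the camera texture (texIn) unchanged. A hypothetical pass-through listener (names are illustrative, not from the repository):

```java
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;

public class PassthroughTextureListener implements CameraTextureListener {
    @Override
    public void onCameraViewStarted(int width, int height) { }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public boolean onCameraTexture(int texIn, int texOut, int width, int height) {
        // No modification performed: returning false displays texIn as delivered.
        return false;
    }
}
```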

@ -1,166 +0,0 @@
package org.opencv.android;
import java.io.IOException;
import java.util.List;
import android.annotation.TargetApi;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Build;
import android.util.Log;
@TargetApi(15)
@SuppressWarnings("deprecation")
public class CameraRenderer extends CameraGLRendererBase {
public static final String LOGTAG = "CameraRenderer";
private Camera mCamera;
private boolean mPreviewStarted = false;
CameraRenderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected synchronized void closeCamera() {
Log.i(LOGTAG, "closeCamera");
if(mCamera != null) {
mCamera.stopPreview();
mPreviewStarted = false;
mCamera.release();
mCamera = null;
}
}
@Override
protected synchronized void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
closeCamera();
if (id == CameraBridgeViewBase.CAMERA_ID_ANY) {
Log.d(LOGTAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(LOGTAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(LOGTAG, "Trying to open camera with new open(" + camIdx + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.i(LOGTAG, "Trying to open BACK camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.i(LOGTAG, "Trying to open FRONT camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.e(LOGTAG, "Back camera not found!");
} else if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.e(LOGTAG, "Front camera not found!");
} else {
Log.d(LOGTAG, "Trying to open camera with new open(" + localCameraIndex + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if(mCamera == null) {
Log.e(LOGTAG, "Error: can't open camera");
return;
}
Camera.Parameters params = mCamera.getParameters();
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
try {
mCamera.setPreviewTexture(mSTexture);
} catch (IOException ioe) {
Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage());
}
}
@Override
public synchronized void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height);
if(mCamera == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return;
}
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
Camera.Parameters param = mCamera.getParameters();
List<Size> psize = param.getSupportedPreviewSizes();
int bestWidth = 0, bestHeight = 0;
if (psize.size() > 0) {
float aspect = (float)width / height;
for (Size size : psize) {
int w = size.width, h = size.height;
Log.d(LOGTAG, "checking camera preview size: "+w+"x"+h);
if ( w <= width && h <= height &&
w >= bestWidth && h >= bestHeight &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
if(bestWidth <= 0 || bestHeight <= 0) {
bestWidth = psize.get(0).width;
bestHeight = psize.get(0).height;
Log.e(LOGTAG, "Error: best size was not selected, using "+bestWidth+" x "+bestHeight);
} else {
Log.i(LOGTAG, "Selected best size: "+bestWidth+" x "+bestHeight);
}
if(mPreviewStarted) {
mCamera.stopPreview();
mPreviewStarted = false;
}
mCameraWidth = bestWidth;
mCameraHeight = bestHeight;
param.setPreviewSize(bestWidth, bestHeight);
}
param.set("orientation", "landscape");
mCamera.setParameters(param);
mCamera.startPreview();
mPreviewStarted = true;
}
}

@ -1,66 +0,0 @@
package org.opencv.android;
import java.text.DecimalFormat;
import org.opencv.core.Core;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.Log;
public class FpsMeter {
private static final String TAG = "FpsMeter";
private static final int STEP = 20;
private static final DecimalFormat FPS_FORMAT = new DecimalFormat("0.00");
private int mFramesCouner;
private double mFrequency;
private long mprevFrameTime;
private String mStrfps;
Paint mPaint;
boolean mIsInitialized = false;
int mWidth = 0;
int mHeight = 0;
public void init() {
mFramesCouner = 0;
mFrequency = Core.getTickFrequency();
mprevFrameTime = Core.getTickCount();
mStrfps = "";
mPaint = new Paint();
mPaint.setColor(Color.BLUE);
mPaint.setTextSize(20);
}
public void measure() {
if (!mIsInitialized) {
init();
mIsInitialized = true;
} else {
mFramesCouner++;
if (mFramesCouner % STEP == 0) {
long time = Core.getTickCount();
double fps = STEP * mFrequency / (time - mprevFrameTime);
mprevFrameTime = time;
if (mWidth != 0 && mHeight != 0)
mStrfps = FPS_FORMAT.format(fps) + " FPS@" + Integer.valueOf(mWidth) + "x" + Integer.valueOf(mHeight);
else
mStrfps = FPS_FORMAT.format(fps) + " FPS";
Log.i(TAG, mStrfps);
}
}
}
public void setResolution(int width, int height) {
mWidth = width;
mHeight = height;
}
public void draw(Canvas canvas, float offsetx, float offsety) {
Log.d(TAG, mStrfps);
canvas.drawText(mStrfps, offsetx, offsety, mPaint);
}
}
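The measurement above averages over STEP = 20 frames: fps = STEP * tickFrequency / (currentTick - previousTick). A worked example with illustrative numbers (a 1e9 ticks-per-second clock is assumed):

```java
public class FpsFormulaExample {
    public static void main(String[] args) {
        final int STEP = 20;                    // frames per measurement window
        double tickFrequency = 1_000_000_000.0; // assumed: 1e9 ticks per second
        long elapsedTicks = 666_666_667L;       // ticks spent rendering 20 frames
        double fps = STEP * tickFrequency / elapsedTicks;
        System.out.printf("%.2f FPS%n", fps);   // prints 30.00 FPS
    }
}
```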

@ -1,34 +0,0 @@
package org.opencv.android;
/**
* Installation callback interface.
*/
public interface InstallCallbackInterface
{
/**
* New package installation is required.
*/
static final int NEW_INSTALLATION = 0;
/**
* Current package installation is in progress.
*/
static final int INSTALLATION_PROGRESS = 1;
/**
* Target package name.
* @return Return target package name.
*/
public String getPackageName();
/**
* Installation is approved.
*/
public void install();
/**
* Installation is canceled.
*/
public void cancel();
/**
* Wait for package installation.
*/
public void wait_install();
};

@ -1,379 +0,0 @@
package org.opencv.android;
import java.util.List;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.ViewGroup.LayoutParams;
import org.opencv.BuildConfig;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
/**
* This class is an implementation of the Bridge View between OpenCV and Java Camera.
 * This class relies on the functionality available in the base class and only implements
* required functions:
* connectCamera - opens Java camera and sets the PreviewCallback to be delivered.
* disconnectCamera - closes the camera and stops preview.
 * When a frame is delivered via the callback from Camera, it is processed via OpenCV to be
 * converted to RGBA32 and then passed to the external callback for modifications if required.
*/
public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;
protected Camera mCamera;
protected JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
private int mPreviewFormat = ImageFormat.NV21;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@Override
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
@Override
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
public JavaCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected boolean initializeCamera(int width, int height) {
Log.d(TAG, "Initialize java camera");
boolean result = true;
synchronized (this) {
mCamera = null;
if (mCameraIndex == CAMERA_ID_ANY) {
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CAMERA_ID_BACK) {
Log.i(TAG, "Trying to open back camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CAMERA_ID_FRONT) {
Log.i(TAG, "Trying to open front camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CAMERA_ID_BACK) {
Log.e(TAG, "Back camera not found!");
} else if (localCameraIndex == CAMERA_ID_FRONT) {
Log.e(TAG, "Front camera not found!");
} else {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.d(TAG, "getSupportedPreviewSizes()");
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
if (sizes != null) {
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
/* Image format NV21 causes issues in the Android emulators */
if (Build.FINGERPRINT.startsWith("generic")
|| Build.FINGERPRINT.startsWith("unknown")
|| Build.MODEL.contains("google_sdk")
|| Build.MODEL.contains("Emulator")
|| Build.MODEL.contains("Android SDK built for x86")
|| Build.MANUFACTURER.contains("Genymotion")
|| (Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
|| "google_sdk".equals(Build.PRODUCT))
params.setPreviewFormat(ImageFormat.YV12); // "generic" or "android" = android emulator
else
params.setPreviewFormat(ImageFormat.NV21);
mPreviewFormat = params.getPreviewFormat();
Log.d(TAG, "Set preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
params.setPreviewSize((int)frameSize.width, (int)frameSize.height);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
params.setRecordingHint(true);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
if (mFpsMeter != null) {
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mFrameChain = new Mat[2];
mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
AllocateCache();
mCameraFrame = new JavaCameraFrame[2];
mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCamera.setPreviewTexture(mSurfaceTexture);
} else
mCamera.setPreviewDisplay(null);
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
}
else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
protected void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
}
mCamera = null;
if (mFrameChain != null) {
mFrameChain[0].release();
mFrameChain[1].release();
}
if (mCameraFrame != null) {
mCameraFrame[0].release();
mCameraFrame[1].release();
}
}
}
private boolean mCameraFrameReady = false;
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate camera
* 2. We need to start thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(width, height))
return false;
mCameraFrameReady = false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
@Override
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Waiting for thread");
if (mThread != null)
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
mCameraFrameReady = false;
}
@Override
public void onPreviewFrame(byte[] frame, Camera arg1) {
if (BuildConfig.DEBUG)
Log.d(TAG, "Preview Frame received. Frame size: " + frame.length);
synchronized (this) {
mFrameChain[mChainIdx].put(0, 0, frame);
mCameraFrameReady = true;
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class JavaCameraFrame implements CvCameraViewFrame {
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else
throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
return mRgba;
}
public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
}
public void release() {
mRgba.release();
}
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
};
private class CameraWorker implements Runnable {
@Override
public void run() {
do {
boolean hasFrame = false;
synchronized (JavaCameraView.this) {
try {
while (!mCameraFrameReady && !mStopThread) {
JavaCameraView.this.wait();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
if (mCameraFrameReady)
{
mChainIdx = 1 - mChainIdx;
mCameraFrameReady = false;
hasFrame = true;
}
}
if (!mStopThread && hasFrame) {
if (!mFrameChain[1 - mChainIdx].empty())
deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
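The callback-buffer sizing in initializeCamera above is width * height * bitsPerPixel(previewFormat) / 8. As a worked example with assumed values, a 640x480 NV21 preview (12 bits per pixel) needs 460800 bytes:

```java
import android.graphics.ImageFormat;

public class PreviewBufferSizeExample {
    // Illustrative only; 640x480 is an assumed preview size.
    static int nv21BufferSize(int width, int height) {
        int bitsPerPixel = ImageFormat.getBitsPerPixel(ImageFormat.NV21); // 12
        return width * height * bitsPerPixel / 8; // 640 * 480 * 12 / 8 = 460800
    }
}
```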

@ -1,40 +0,0 @@
package org.opencv.android;
/**
* Interface for callback object in case of asynchronous initialization of OpenCV.
*/
public interface LoaderCallbackInterface
{
/**
* OpenCV initialization finished successfully.
*/
static final int SUCCESS = 0;
/**
* Google Play Market cannot be invoked.
*/
static final int MARKET_ERROR = 2;
/**
* OpenCV library installation has been canceled by the user.
*/
static final int INSTALL_CANCELED = 3;
/**
* This version of OpenCV Manager Service is incompatible with the app. Possibly, a service update is required.
*/
static final int INCOMPATIBLE_MANAGER_VERSION = 4;
/**
* OpenCV library initialization has failed.
*/
static final int INIT_FAILED = 0xff;
/**
* Callback method, called after OpenCV library initialization.
* @param status status of initialization (see initialization status constants).
*/
public void onManagerConnected(int status);
/**
* Callback method, called in case the package installation is needed.
* @param callback answer object with approve and cancel methods and the package description.
*/
public void onPackageInstall(final int operation, InstallCallbackInterface callback);
};

@ -1,132 +0,0 @@
package org.opencv.android;
import android.content.Context;
/**
* Helper class provides common initialization methods for OpenCV library.
*/
public class OpenCVLoader
{
/**
* OpenCV Library version 2.4.2.
*/
public static final String OPENCV_VERSION_2_4_2 = "2.4.2";
/**
* OpenCV Library version 2.4.3.
*/
public static final String OPENCV_VERSION_2_4_3 = "2.4.3";
/**
* OpenCV Library version 2.4.4.
*/
public static final String OPENCV_VERSION_2_4_4 = "2.4.4";
/**
* OpenCV Library version 2.4.5.
*/
public static final String OPENCV_VERSION_2_4_5 = "2.4.5";
/**
* OpenCV Library version 2.4.6.
*/
public static final String OPENCV_VERSION_2_4_6 = "2.4.6";
/**
* OpenCV Library version 2.4.7.
*/
public static final String OPENCV_VERSION_2_4_7 = "2.4.7";
/**
* OpenCV Library version 2.4.8.
*/
public static final String OPENCV_VERSION_2_4_8 = "2.4.8";
/**
* OpenCV Library version 2.4.9.
*/
public static final String OPENCV_VERSION_2_4_9 = "2.4.9";
/**
* OpenCV Library version 2.4.10.
*/
public static final String OPENCV_VERSION_2_4_10 = "2.4.10";
/**
* OpenCV Library version 2.4.11.
*/
public static final String OPENCV_VERSION_2_4_11 = "2.4.11";
/**
* OpenCV Library version 2.4.12.
*/
public static final String OPENCV_VERSION_2_4_12 = "2.4.12";
/**
* OpenCV Library version 2.4.13.
*/
public static final String OPENCV_VERSION_2_4_13 = "2.4.13";
/**
* OpenCV Library version 3.0.0.
*/
public static final String OPENCV_VERSION_3_0_0 = "3.0.0";
/**
* OpenCV Library version 3.1.0.
*/
public static final String OPENCV_VERSION_3_1_0 = "3.1.0";
/**
* OpenCV Library version 3.2.0.
*/
public static final String OPENCV_VERSION_3_2_0 = "3.2.0";
/**
* OpenCV Library version 3.3.0.
*/
public static final String OPENCV_VERSION_3_3_0 = "3.3.0";
/**
* OpenCV Library version 3.4.0.
*/
public static final String OPENCV_VERSION_3_4_0 = "3.4.0";
/**
* Current OpenCV Library version
*/
public static final String OPENCV_VERSION = "3.4.0";
/**
* Loads and initializes OpenCV library from current application package. Roughly, it's an analog of system.loadLibrary("opencv_java").
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug()
{
return StaticHelper.initOpenCV(false);
}
/**
* Loads and initializes OpenCV library from the current application package. Roughly, it is an analog of System.loadLibrary("opencv_java").
* @param InitCuda load and initialize CUDA runtime libraries.
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug(boolean InitCuda)
{
return StaticHelper.initOpenCV(InitCuda);
}
/**
* Loads and initializes OpenCV library using OpenCV Engine service.
* @param Version OpenCV library version.
* @param AppContext application context for connecting to the service.
* @param Callback object that implements LoaderCallbackInterface for handling the connection status.
* @return Returns true if initialization of OpenCV is successful.
*/
public static boolean initAsync(String Version, Context AppContext,
LoaderCallbackInterface Callback)
{
return AsyncServiceHelper.initOpenCV(Version, AppContext, Callback);
}
}
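A hedged sketch of the usual call pattern from an Activity: try the libraries packaged with the APK first, and fall back to the OpenCV Manager service otherwise (mLoaderCallback is assumed to be a LoaderCallbackInterface like the one sketched earlier, and `this` an Android Context):

// onResume() is the conventional place for this check.
if (OpenCVLoader.initDebug()) {
    // Libraries were bundled with the APK; report success directly.
    mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
} else {
    // Ask the OpenCV Manager service to provide the requested version.
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0, this, mLoaderCallback);
}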

@@ -1,104 +0,0 @@
package org.opencv.android;
import org.opencv.core.Core;
import java.util.StringTokenizer;
import android.util.Log;
class StaticHelper {
public static boolean initOpenCV(boolean InitCuda)
{
boolean result;
String libs = "";
if(InitCuda)
{
loadLibrary("cudart");
loadLibrary("nppc");
loadLibrary("nppi");
loadLibrary("npps");
loadLibrary("cufft");
loadLibrary("cublas");
}
Log.d(TAG, "Trying to get library list");
try
{
System.loadLibrary("opencv_info");
libs = getLibraryList();
}
catch(UnsatisfiedLinkError e)
{
Log.e(TAG, "OpenCV error: Cannot load info library for OpenCV");
}
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
if (initOpenCVLibs(libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
result = true;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
result = false;
}
return result;
}
private static boolean loadLibrary(String Name)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + Name);
try
{
System.loadLibrary(Name);
Log.d(TAG, "Library " + Name + " loaded");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + Name + "\"");
e.printStackTrace();
result &= false;
}
return result;
}
private static boolean initOpenCVLibs(String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
result &= loadLibrary(splitter.nextToken());
}
}
else
{
// If dependencies list is not defined or empty.
result &= loadLibrary("opencv_java3");
}
return result;
}
private static final String TAG = "OpenCV/StaticHelper";
private static native String getLibraryList();
}
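The dependency list returned by getLibraryList() is just a ";"-separated string of library names; a standalone, pure-Java illustration of the parsing that initOpenCVLibs() performs (the library names here are made up):

import java.util.StringTokenizer;

public class LibraryListDemo {
    public static void main(String[] args) {
        String libs = "tbb;ippicv;opencv_java3";   // hypothetical list
        StringTokenizer splitter = new StringTokenizer(libs, ";");
        while (splitter.hasMoreTokens()) {
            // StaticHelper would call System.loadLibrary() on each token,
            // falling back to "opencv_java3" if the list were empty.
            System.out.println("would load: " + splitter.nextToken());
        }
    }
}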

@@ -1,139 +0,0 @@
package org.opencv.android;
import android.content.Context;
import android.graphics.Bitmap;
import org.opencv.core.CvException;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
public class Utils {
public static String exportResource(Context context, int resourceId) {
return exportResource(context, resourceId, "OpenCV_data");
}
public static String exportResource(Context context, int resourceId, String dirname) {
String fullname = context.getResources().getString(resourceId);
String resName = fullname.substring(fullname.lastIndexOf("/") + 1);
try {
InputStream is = context.getResources().openRawResource(resourceId);
File resDir = context.getDir(dirname, Context.MODE_PRIVATE);
File resFile = new File(resDir, resName);
FileOutputStream os = new FileOutputStream(resFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
return resFile.getAbsolutePath();
} catch (IOException e) {
e.printStackTrace();
throw new CvException("Failed to export resource " + resName
+ ". Exception thrown: " + e);
}
}
public static Mat loadResource(Context context, int resourceId) throws IOException
{
return loadResource(context, resourceId, -1);
}
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
InputStream is = context.getResources().openRawResource(resourceId);
ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
encoded.put(0, 0, os.toByteArray());
os.close();
Mat decoded = Imgcodecs.imdecode(encoded, flags);
encoded.release();
return decoded;
}
/**
* Converts Android Bitmap to OpenCV Mat.
* <p>
* This function converts an Android Bitmap image to the OpenCV Mat.
* <br>'ARGB_8888' and 'RGB_565' input Bitmap formats are supported.
* <br>The output Mat is always created of the same size as the input Bitmap and of the 'CV_8UC4' type;
* it keeps the image in RGBA format.
* <br>This function throws an exception if the conversion fails.
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so it may be empty.
* @param unPremultiplyAlpha is a flag that determines whether the bitmap needs to be converted from the alpha-premultiplied format (as Android keeps 'ARGB_8888' bitmaps) to the regular one; this flag is ignored for 'RGB_565' bitmaps.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat, boolean unPremultiplyAlpha) {
if (bmp == null)
throw new java.lang.IllegalArgumentException("bmp == null");
if (mat == null)
throw new java.lang.IllegalArgumentException("mat == null");
nBitmapToMat2(bmp, mat.nativeObj, unPremultiplyAlpha);
}
/**
* Short form of the bitmapToMat(bmp, mat, unPremultiplyAlpha=false).
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so Mat may be empty.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat) {
bitmapToMat(bmp, mat, false);
}
/**
* Converts OpenCV Mat to Android Bitmap.
* <p>
* <br>This function converts an image in the OpenCV Mat representation to the Android Bitmap.
* <br>The input Mat object has to be of the types 'CV_8UC1' (gray-scale), 'CV_8UC3' (RGB) or 'CV_8UC4' (RGBA).
* <br>The output Bitmap object has to be of the same size as the input Mat and of the types 'ARGB_8888' or 'RGB_565'.
* <br>This function throws an exception if the conversion fails.
*
* @param mat is a valid input Mat object of types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
* @param premultiplyAlpha is a flag that determines whether the Mat needs to be converted to the alpha-premultiplied format (as Android keeps 'ARGB_8888' bitmaps); the flag is ignored for 'RGB_565' bitmaps.
*/
public static void matToBitmap(Mat mat, Bitmap bmp, boolean premultiplyAlpha) {
if (mat == null)
throw new java.lang.IllegalArgumentException("mat == null");
if (bmp == null)
throw new java.lang.IllegalArgumentException("bmp == null");
nMatToBitmap2(mat.nativeObj, bmp, premultiplyAlpha);
}
/**
* Short form of the <b>matToBitmap(mat, bmp, premultiplyAlpha=false)</b>
* @param mat is a valid input Mat object of the types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
*/
public static void matToBitmap(Mat mat, Bitmap bmp) {
matToBitmap(mat, bmp, false);
}
private static native void nBitmapToMat2(Bitmap b, long m_addr, boolean unPremultiplyAlpha);
private static native void nMatToBitmap2(long m_addr, Bitmap b, boolean premultiplyAlpha);
}
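A short usage sketch of the two conversion helpers above (a fragment, assuming the OpenCV native library is already loaded and srcBitmap is an ARGB_8888 Bitmap supplied by the caller):

Mat rgba = new Mat();
Utils.bitmapToMat(srcBitmap, rgba);      // rgba is CV_8UC4, RGBA channel order
// ... run whatever Mat-based processing is needed here ...
Bitmap out = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(rgba, out);            // copy the result back to a Bitmap
rgba.release();                          // free the native buffer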

@@ -1,331 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
import org.opencv.core.Rect;
// C++: class StereoBM
//javadoc: StereoBM
public class StereoBM extends StereoMatcher {
protected StereoBM(long addr) { super(addr); }
public static final int
PREFILTER_NORMALIZED_RESPONSE = 0,
PREFILTER_XSOBEL = 1;
//
// C++: static Ptr_StereoBM create(int numDisparities = 0, int blockSize = 21)
//
//javadoc: StereoBM::create(numDisparities, blockSize)
public static StereoBM create(int numDisparities, int blockSize)
{
StereoBM retVal = new StereoBM(create_0(numDisparities, blockSize));
return retVal;
}
//javadoc: StereoBM::create()
public static StereoBM create()
{
StereoBM retVal = new StereoBM(create_1());
return retVal;
}
//
// C++: Rect getROI1()
//
//javadoc: StereoBM::getROI1()
public Rect getROI1()
{
Rect retVal = new Rect(getROI1_0(nativeObj));
return retVal;
}
//
// C++: Rect getROI2()
//
//javadoc: StereoBM::getROI2()
public Rect getROI2()
{
Rect retVal = new Rect(getROI2_0(nativeObj));
return retVal;
}
//
// C++: int getPreFilterCap()
//
//javadoc: StereoBM::getPreFilterCap()
public int getPreFilterCap()
{
int retVal = getPreFilterCap_0(nativeObj);
return retVal;
}
//
// C++: int getPreFilterSize()
//
//javadoc: StereoBM::getPreFilterSize()
public int getPreFilterSize()
{
int retVal = getPreFilterSize_0(nativeObj);
return retVal;
}
//
// C++: int getPreFilterType()
//
//javadoc: StereoBM::getPreFilterType()
public int getPreFilterType()
{
int retVal = getPreFilterType_0(nativeObj);
return retVal;
}
//
// C++: int getSmallerBlockSize()
//
//javadoc: StereoBM::getSmallerBlockSize()
public int getSmallerBlockSize()
{
int retVal = getSmallerBlockSize_0(nativeObj);
return retVal;
}
//
// C++: int getTextureThreshold()
//
//javadoc: StereoBM::getTextureThreshold()
public int getTextureThreshold()
{
int retVal = getTextureThreshold_0(nativeObj);
return retVal;
}
//
// C++: int getUniquenessRatio()
//
//javadoc: StereoBM::getUniquenessRatio()
public int getUniquenessRatio()
{
int retVal = getUniquenessRatio_0(nativeObj);
return retVal;
}
//
// C++: void setPreFilterCap(int preFilterCap)
//
//javadoc: StereoBM::setPreFilterCap(preFilterCap)
public void setPreFilterCap(int preFilterCap)
{
setPreFilterCap_0(nativeObj, preFilterCap);
return;
}
//
// C++: void setPreFilterSize(int preFilterSize)
//
//javadoc: StereoBM::setPreFilterSize(preFilterSize)
public void setPreFilterSize(int preFilterSize)
{
setPreFilterSize_0(nativeObj, preFilterSize);
return;
}
//
// C++: void setPreFilterType(int preFilterType)
//
//javadoc: StereoBM::setPreFilterType(preFilterType)
public void setPreFilterType(int preFilterType)
{
setPreFilterType_0(nativeObj, preFilterType);
return;
}
//
// C++: void setROI1(Rect roi1)
//
//javadoc: StereoBM::setROI1(roi1)
public void setROI1(Rect roi1)
{
setROI1_0(nativeObj, roi1.x, roi1.y, roi1.width, roi1.height);
return;
}
//
// C++: void setROI2(Rect roi2)
//
//javadoc: StereoBM::setROI2(roi2)
public void setROI2(Rect roi2)
{
setROI2_0(nativeObj, roi2.x, roi2.y, roi2.width, roi2.height);
return;
}
//
// C++: void setSmallerBlockSize(int blockSize)
//
//javadoc: StereoBM::setSmallerBlockSize(blockSize)
public void setSmallerBlockSize(int blockSize)
{
setSmallerBlockSize_0(nativeObj, blockSize);
return;
}
//
// C++: void setTextureThreshold(int textureThreshold)
//
//javadoc: StereoBM::setTextureThreshold(textureThreshold)
public void setTextureThreshold(int textureThreshold)
{
setTextureThreshold_0(nativeObj, textureThreshold);
return;
}
//
// C++: void setUniquenessRatio(int uniquenessRatio)
//
//javadoc: StereoBM::setUniquenessRatio(uniquenessRatio)
public void setUniquenessRatio(int uniquenessRatio)
{
setUniquenessRatio_0(nativeObj, uniquenessRatio);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_StereoBM create(int numDisparities = 0, int blockSize = 21)
private static native long create_0(int numDisparities, int blockSize);
private static native long create_1();
// C++: Rect getROI1()
private static native double[] getROI1_0(long nativeObj);
// C++: Rect getROI2()
private static native double[] getROI2_0(long nativeObj);
// C++: int getPreFilterCap()
private static native int getPreFilterCap_0(long nativeObj);
// C++: int getPreFilterSize()
private static native int getPreFilterSize_0(long nativeObj);
// C++: int getPreFilterType()
private static native int getPreFilterType_0(long nativeObj);
// C++: int getSmallerBlockSize()
private static native int getSmallerBlockSize_0(long nativeObj);
// C++: int getTextureThreshold()
private static native int getTextureThreshold_0(long nativeObj);
// C++: int getUniquenessRatio()
private static native int getUniquenessRatio_0(long nativeObj);
// C++: void setPreFilterCap(int preFilterCap)
private static native void setPreFilterCap_0(long nativeObj, int preFilterCap);
// C++: void setPreFilterSize(int preFilterSize)
private static native void setPreFilterSize_0(long nativeObj, int preFilterSize);
// C++: void setPreFilterType(int preFilterType)
private static native void setPreFilterType_0(long nativeObj, int preFilterType);
// C++: void setROI1(Rect roi1)
private static native void setROI1_0(long nativeObj, int roi1_x, int roi1_y, int roi1_width, int roi1_height);
// C++: void setROI2(Rect roi2)
private static native void setROI2_0(long nativeObj, int roi2_x, int roi2_y, int roi2_width, int roi2_height);
// C++: void setSmallerBlockSize(int blockSize)
private static native void setSmallerBlockSize_0(long nativeObj, int blockSize);
// C++: void setTextureThreshold(int textureThreshold)
private static native void setTextureThreshold_0(long nativeObj, int textureThreshold);
// C++: void setUniquenessRatio(int uniquenessRatio)
private static native void setUniquenessRatio_0(long nativeObj, int uniquenessRatio);
// native support for java finalize()
private static native void delete(long nativeObj);
}
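A hedged sketch of block-matching disparity computation with the API above (assumes the native library is loaded and leftGray/rightGray are rectified CV_8UC1 Mats of equal size):

StereoBM bm = StereoBM.create(64, 21);          // numDisparities (multiple of 16), odd blockSize
bm.setPreFilterType(StereoBM.PREFILTER_XSOBEL);
bm.setTextureThreshold(10);
Mat disparity = new Mat();
bm.compute(leftGray, rightGray, disparity);     // CV_16S output, fixed-point scaled by DISP_SCALE (16)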

@@ -1,254 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
// C++: class StereoMatcher
//javadoc: StereoMatcher
public class StereoMatcher extends Algorithm {
protected StereoMatcher(long addr) { super(addr); }
public static final int
DISP_SHIFT = 4,
DISP_SCALE = (1 << DISP_SHIFT);
//
// C++: int getBlockSize()
//
//javadoc: StereoMatcher::getBlockSize()
public int getBlockSize()
{
int retVal = getBlockSize_0(nativeObj);
return retVal;
}
//
// C++: int getDisp12MaxDiff()
//
//javadoc: StereoMatcher::getDisp12MaxDiff()
public int getDisp12MaxDiff()
{
int retVal = getDisp12MaxDiff_0(nativeObj);
return retVal;
}
//
// C++: int getMinDisparity()
//
//javadoc: StereoMatcher::getMinDisparity()
public int getMinDisparity()
{
int retVal = getMinDisparity_0(nativeObj);
return retVal;
}
//
// C++: int getNumDisparities()
//
//javadoc: StereoMatcher::getNumDisparities()
public int getNumDisparities()
{
int retVal = getNumDisparities_0(nativeObj);
return retVal;
}
//
// C++: int getSpeckleRange()
//
//javadoc: StereoMatcher::getSpeckleRange()
public int getSpeckleRange()
{
int retVal = getSpeckleRange_0(nativeObj);
return retVal;
}
//
// C++: int getSpeckleWindowSize()
//
//javadoc: StereoMatcher::getSpeckleWindowSize()
public int getSpeckleWindowSize()
{
int retVal = getSpeckleWindowSize_0(nativeObj);
return retVal;
}
//
// C++: void compute(Mat left, Mat right, Mat& disparity)
//
//javadoc: StereoMatcher::compute(left, right, disparity)
public void compute(Mat left, Mat right, Mat disparity)
{
compute_0(nativeObj, left.nativeObj, right.nativeObj, disparity.nativeObj);
return;
}
//
// C++: void setBlockSize(int blockSize)
//
//javadoc: StereoMatcher::setBlockSize(blockSize)
public void setBlockSize(int blockSize)
{
setBlockSize_0(nativeObj, blockSize);
return;
}
//
// C++: void setDisp12MaxDiff(int disp12MaxDiff)
//
//javadoc: StereoMatcher::setDisp12MaxDiff(disp12MaxDiff)
public void setDisp12MaxDiff(int disp12MaxDiff)
{
setDisp12MaxDiff_0(nativeObj, disp12MaxDiff);
return;
}
//
// C++: void setMinDisparity(int minDisparity)
//
//javadoc: StereoMatcher::setMinDisparity(minDisparity)
public void setMinDisparity(int minDisparity)
{
setMinDisparity_0(nativeObj, minDisparity);
return;
}
//
// C++: void setNumDisparities(int numDisparities)
//
//javadoc: StereoMatcher::setNumDisparities(numDisparities)
public void setNumDisparities(int numDisparities)
{
setNumDisparities_0(nativeObj, numDisparities);
return;
}
//
// C++: void setSpeckleRange(int speckleRange)
//
//javadoc: StereoMatcher::setSpeckleRange(speckleRange)
public void setSpeckleRange(int speckleRange)
{
setSpeckleRange_0(nativeObj, speckleRange);
return;
}
//
// C++: void setSpeckleWindowSize(int speckleWindowSize)
//
//javadoc: StereoMatcher::setSpeckleWindowSize(speckleWindowSize)
public void setSpeckleWindowSize(int speckleWindowSize)
{
setSpeckleWindowSize_0(nativeObj, speckleWindowSize);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: int getBlockSize()
private static native int getBlockSize_0(long nativeObj);
// C++: int getDisp12MaxDiff()
private static native int getDisp12MaxDiff_0(long nativeObj);
// C++: int getMinDisparity()
private static native int getMinDisparity_0(long nativeObj);
// C++: int getNumDisparities()
private static native int getNumDisparities_0(long nativeObj);
// C++: int getSpeckleRange()
private static native int getSpeckleRange_0(long nativeObj);
// C++: int getSpeckleWindowSize()
private static native int getSpeckleWindowSize_0(long nativeObj);
// C++: void compute(Mat left, Mat right, Mat& disparity)
private static native void compute_0(long nativeObj, long left_nativeObj, long right_nativeObj, long disparity_nativeObj);
// C++: void setBlockSize(int blockSize)
private static native void setBlockSize_0(long nativeObj, int blockSize);
// C++: void setDisp12MaxDiff(int disp12MaxDiff)
private static native void setDisp12MaxDiff_0(long nativeObj, int disp12MaxDiff);
// C++: void setMinDisparity(int minDisparity)
private static native void setMinDisparity_0(long nativeObj, int minDisparity);
// C++: void setNumDisparities(int numDisparities)
private static native void setNumDisparities_0(long nativeObj, int numDisparities);
// C++: void setSpeckleRange(int speckleRange)
private static native void setSpeckleRange_0(long nativeObj, int speckleRange);
// C++: void setSpeckleWindowSize(int speckleWindowSize)
private static native void setSpeckleWindowSize_0(long nativeObj, int speckleWindowSize);
// native support for java finalize()
private static native void delete(long nativeObj);
}

@@ -1,231 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
// C++: class StereoSGBM
//javadoc: StereoSGBM
public class StereoSGBM extends StereoMatcher {
protected StereoSGBM(long addr) { super(addr); }
public static final int
MODE_SGBM = 0,
MODE_HH = 1,
MODE_SGBM_3WAY = 2,
MODE_HH4 = 3;
//
// C++: static Ptr_StereoSGBM create(int minDisparity = 0, int numDisparities = 16, int blockSize = 3, int P1 = 0, int P2 = 0, int disp12MaxDiff = 0, int preFilterCap = 0, int uniquenessRatio = 0, int speckleWindowSize = 0, int speckleRange = 0, int mode = StereoSGBM::MODE_SGBM)
//
//javadoc: StereoSGBM::create(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange, mode)
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange, int mode)
{
StereoSGBM retVal = new StereoSGBM(create_0(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange, mode));
return retVal;
}
//javadoc: StereoSGBM::create()
public static StereoSGBM create()
{
StereoSGBM retVal = new StereoSGBM(create_1());
return retVal;
}
//
// C++: int getMode()
//
//javadoc: StereoSGBM::getMode()
public int getMode()
{
int retVal = getMode_0(nativeObj);
return retVal;
}
//
// C++: int getP1()
//
//javadoc: StereoSGBM::getP1()
public int getP1()
{
int retVal = getP1_0(nativeObj);
return retVal;
}
//
// C++: int getP2()
//
//javadoc: StereoSGBM::getP2()
public int getP2()
{
int retVal = getP2_0(nativeObj);
return retVal;
}
//
// C++: int getPreFilterCap()
//
//javadoc: StereoSGBM::getPreFilterCap()
public int getPreFilterCap()
{
int retVal = getPreFilterCap_0(nativeObj);
return retVal;
}
//
// C++: int getUniquenessRatio()
//
//javadoc: StereoSGBM::getUniquenessRatio()
public int getUniquenessRatio()
{
int retVal = getUniquenessRatio_0(nativeObj);
return retVal;
}
//
// C++: void setMode(int mode)
//
//javadoc: StereoSGBM::setMode(mode)
public void setMode(int mode)
{
setMode_0(nativeObj, mode);
return;
}
//
// C++: void setP1(int P1)
//
//javadoc: StereoSGBM::setP1(P1)
public void setP1(int P1)
{
setP1_0(nativeObj, P1);
return;
}
//
// C++: void setP2(int P2)
//
//javadoc: StereoSGBM::setP2(P2)
public void setP2(int P2)
{
setP2_0(nativeObj, P2);
return;
}
//
// C++: void setPreFilterCap(int preFilterCap)
//
//javadoc: StereoSGBM::setPreFilterCap(preFilterCap)
public void setPreFilterCap(int preFilterCap)
{
setPreFilterCap_0(nativeObj, preFilterCap);
return;
}
//
// C++: void setUniquenessRatio(int uniquenessRatio)
//
//javadoc: StereoSGBM::setUniquenessRatio(uniquenessRatio)
public void setUniquenessRatio(int uniquenessRatio)
{
setUniquenessRatio_0(nativeObj, uniquenessRatio);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_StereoSGBM create(int minDisparity = 0, int numDisparities = 16, int blockSize = 3, int P1 = 0, int P2 = 0, int disp12MaxDiff = 0, int preFilterCap = 0, int uniquenessRatio = 0, int speckleWindowSize = 0, int speckleRange = 0, int mode = StereoSGBM::MODE_SGBM)
private static native long create_0(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange, int mode);
private static native long create_1();
// C++: int getMode()
private static native int getMode_0(long nativeObj);
// C++: int getP1()
private static native int getP1_0(long nativeObj);
// C++: int getP2()
private static native int getP2_0(long nativeObj);
// C++: int getPreFilterCap()
private static native int getPreFilterCap_0(long nativeObj);
// C++: int getUniquenessRatio()
private static native int getUniquenessRatio_0(long nativeObj);
// C++: void setMode(int mode)
private static native void setMode_0(long nativeObj, int mode);
// C++: void setP1(int P1)
private static native void setP1_0(long nativeObj, int P1);
// C++: void setP2(int P2)
private static native void setP2_0(long nativeObj, int P2);
// C++: void setPreFilterCap(int preFilterCap)
private static native void setPreFilterCap_0(long nativeObj, int preFilterCap);
// C++: void setUniquenessRatio(int uniquenessRatio)
private static native void setUniquenessRatio_0(long nativeObj, int uniquenessRatio);
// native support for java finalize()
private static native void delete(long nativeObj);
}
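For comparison, a sketch of the semi-global matcher; the P1/P2 smoothness penalties follow the commonly cited 8*channels*blockSize^2 and 32*channels*blockSize^2 rule of thumb, and leftGray/rightGray are again assumed to be rectified grayscale Mats:

int blockSize = 5;
int channels = 1;                                // grayscale input assumed
StereoSGBM sgbm = StereoSGBM.create(
        0,                                       // minDisparity
        64,                                      // numDisparities, multiple of 16
        blockSize,
        8 * channels * blockSize * blockSize,    // P1
        32 * channels * blockSize * blockSize,   // P2
        1,                                       // disp12MaxDiff
        63,                                      // preFilterCap
        10,                                      // uniquenessRatio
        100,                                     // speckleWindowSize
        32,                                      // speckleRange
        StereoSGBM.MODE_SGBM_3WAY);
Mat disparity = new Mat();
sgbm.compute(leftGray, rightGray, disparity);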

@@ -1,111 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.core;
import java.lang.String;
// C++: class Algorithm
//javadoc: Algorithm
public class Algorithm {
protected final long nativeObj;
protected Algorithm(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
//
// C++: String getDefaultName()
//
//javadoc: Algorithm::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
//
// C++: bool empty()
//
//javadoc: Algorithm::empty()
public boolean empty()
{
boolean retVal = empty_0(nativeObj);
return retVal;
}
//
// C++: void clear()
//
//javadoc: Algorithm::clear()
public void clear()
{
clear_0(nativeObj);
return;
}
//
// C++: void read(FileNode fn)
//
// Unknown type 'FileNode' (I), skipping the function
//
// C++: void save(String filename)
//
//javadoc: Algorithm::save(filename)
public void save(String filename)
{
save_0(nativeObj, filename);
return;
}
//
// C++: void write(Ptr_FileStorage fs, String name = String())
//
// Unknown type 'Ptr_FileStorage' (I), skipping the function
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// C++: bool empty()
private static native boolean empty_0(long nativeObj);
// C++: void clear()
private static native void clear_0(long nativeObj);
// C++: void save(String filename)
private static native void save_0(long nativeObj, String filename);
// native support for java finalize()
private static native void delete(long nativeObj);
}

File diff suppressed because it is too large

@@ -1,15 +0,0 @@
package org.opencv.core;
public class CvException extends RuntimeException {
private static final long serialVersionUID = 1L;
public CvException(String msg) {
super(msg);
}
@Override
public String toString() {
return "CvException [" + super.toString() + "]";
}
}

@@ -1,136 +0,0 @@
package org.opencv.core;
public final class CvType {
// type depth constants
public static final int
CV_8U = 0, CV_8S = 1,
CV_16U = 2, CV_16S = 3,
CV_32S = 4,
CV_32F = 5,
CV_64F = 6,
CV_USRTYPE1 = 7;
// predefined type constants
public static final int
CV_8UC1 = CV_8UC(1), CV_8UC2 = CV_8UC(2), CV_8UC3 = CV_8UC(3), CV_8UC4 = CV_8UC(4),
CV_8SC1 = CV_8SC(1), CV_8SC2 = CV_8SC(2), CV_8SC3 = CV_8SC(3), CV_8SC4 = CV_8SC(4),
CV_16UC1 = CV_16UC(1), CV_16UC2 = CV_16UC(2), CV_16UC3 = CV_16UC(3), CV_16UC4 = CV_16UC(4),
CV_16SC1 = CV_16SC(1), CV_16SC2 = CV_16SC(2), CV_16SC3 = CV_16SC(3), CV_16SC4 = CV_16SC(4),
CV_32SC1 = CV_32SC(1), CV_32SC2 = CV_32SC(2), CV_32SC3 = CV_32SC(3), CV_32SC4 = CV_32SC(4),
CV_32FC1 = CV_32FC(1), CV_32FC2 = CV_32FC(2), CV_32FC3 = CV_32FC(3), CV_32FC4 = CV_32FC(4),
CV_64FC1 = CV_64FC(1), CV_64FC2 = CV_64FC(2), CV_64FC3 = CV_64FC(3), CV_64FC4 = CV_64FC(4);
private static final int CV_CN_MAX = 512, CV_CN_SHIFT = 3, CV_DEPTH_MAX = (1 << CV_CN_SHIFT);
public static final int makeType(int depth, int channels) {
if (channels <= 0 || channels >= CV_CN_MAX) {
throw new java.lang.UnsupportedOperationException(
"Channels count should be 1.." + (CV_CN_MAX - 1));
}
if (depth < 0 || depth >= CV_DEPTH_MAX) {
throw new java.lang.UnsupportedOperationException(
"Data type depth should be 0.." + (CV_DEPTH_MAX - 1));
}
return (depth & (CV_DEPTH_MAX - 1)) + ((channels - 1) << CV_CN_SHIFT);
}
public static final int CV_8UC(int ch) {
return makeType(CV_8U, ch);
}
public static final int CV_8SC(int ch) {
return makeType(CV_8S, ch);
}
public static final int CV_16UC(int ch) {
return makeType(CV_16U, ch);
}
public static final int CV_16SC(int ch) {
return makeType(CV_16S, ch);
}
public static final int CV_32SC(int ch) {
return makeType(CV_32S, ch);
}
public static final int CV_32FC(int ch) {
return makeType(CV_32F, ch);
}
public static final int CV_64FC(int ch) {
return makeType(CV_64F, ch);
}
public static final int channels(int type) {
return (type >> CV_CN_SHIFT) + 1;
}
public static final int depth(int type) {
return type & (CV_DEPTH_MAX - 1);
}
public static final boolean isInteger(int type) {
return depth(type) < CV_32F;
}
public static final int ELEM_SIZE(int type) {
switch (depth(type)) {
case CV_8U:
case CV_8S:
return channels(type);
case CV_16U:
case CV_16S:
return 2 * channels(type);
case CV_32S:
case CV_32F:
return 4 * channels(type);
case CV_64F:
return 8 * channels(type);
default:
throw new java.lang.UnsupportedOperationException(
"Unsupported CvType value: " + type);
}
}
public static final String typeToString(int type) {
String s;
switch (depth(type)) {
case CV_8U:
s = "CV_8U";
break;
case CV_8S:
s = "CV_8S";
break;
case CV_16U:
s = "CV_16U";
break;
case CV_16S:
s = "CV_16S";
break;
case CV_32S:
s = "CV_32S";
break;
case CV_32F:
s = "CV_32F";
break;
case CV_64F:
s = "CV_64F";
break;
case CV_USRTYPE1:
s = "CV_USRTYPE1";
break;
default:
throw new java.lang.UnsupportedOperationException(
"Unsupported CvType value: " + type);
}
int ch = channels(type);
if (ch <= 4)
return s + "C" + ch;
else
return s + "C(" + ch + ")";
}
}
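Because CvType is plain Java, the encoding above can be checked without the native library: the depth occupies the low 3 bits and (channels - 1) the bits above them, so CV_8UC3 encodes to 16.

import org.opencv.core.CvType;

public class CvTypeDemo {
    public static void main(String[] args) {
        int t = CvType.CV_8UC3;                              // makeType(CV_8U, 3) == 16
        System.out.println(CvType.typeToString(t));          // CV_8UC3
        System.out.println(CvType.channels(t));              // 3
        System.out.println(CvType.depth(t) == CvType.CV_8U); // true
        System.out.println(CvType.ELEM_SIZE(t));             // 3 bytes per element
    }
}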

@@ -1,58 +0,0 @@
package org.opencv.core;
//C++: class DMatch
/**
* Structure for matching: query descriptor index, train descriptor index, train
* image index and distance between descriptors.
*/
public class DMatch {
/**
* Query descriptor index.
*/
public int queryIdx;
/**
* Train descriptor index.
*/
public int trainIdx;
/**
* Train image index.
*/
public int imgIdx;
// javadoc: DMatch::distance
public float distance;
// javadoc: DMatch::DMatch()
public DMatch() {
this(-1, -1, Float.MAX_VALUE);
}
// javadoc: DMatch::DMatch(_queryIdx, _trainIdx, _distance)
public DMatch(int _queryIdx, int _trainIdx, float _distance) {
queryIdx = _queryIdx;
trainIdx = _trainIdx;
imgIdx = -1;
distance = _distance;
}
// javadoc: DMatch::DMatch(_queryIdx, _trainIdx, _imgIdx, _distance)
public DMatch(int _queryIdx, int _trainIdx, int _imgIdx, float _distance) {
queryIdx = _queryIdx;
trainIdx = _trainIdx;
imgIdx = _imgIdx;
distance = _distance;
}
public boolean lessThan(DMatch it) {
return distance < it.distance;
}
@Override
public String toString() {
return "DMatch [queryIdx=" + queryIdx + ", trainIdx=" + trainIdx
+ ", imgIdx=" + imgIdx + ", distance=" + distance + "]";
}
}
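DMatch is also pure Java; a small example of the usual post-matching step, ranking matches so the smallest descriptor distance comes first:

import java.util.ArrayList;
import java.util.List;
import org.opencv.core.DMatch;

public class DMatchDemo {
    public static void main(String[] args) {
        List<DMatch> matches = new ArrayList<>();
        matches.add(new DMatch(0, 3, 0.9f));
        matches.add(new DMatch(1, 7, 0.2f));
        matches.add(new DMatch(2, 5, 0.5f));
        matches.sort((a, b) -> Float.compare(a.distance, b.distance));
        for (DMatch m : matches) {
            System.out.println(m);   // best (lowest distance) match printed first
        }
    }
}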

@@ -1,83 +0,0 @@
package org.opencv.core;
import org.opencv.core.Point;
//javadoc: KeyPoint
public class KeyPoint {
/**
* Coordinates of the keypoint.
*/
public Point pt;
/**
* Diameter of the useful keypoint adjacent area.
*/
public float size;
/**
* Computed orientation of the keypoint (-1 if not applicable).
*/
public float angle;
/**
* The response, by which the strongest keypoints have been selected. Can
* be used for further sorting or subsampling.
*/
public float response;
/**
* Octave (pyramid layer), from which the keypoint has been extracted.
*/
public int octave;
/**
* Object ID, that can be used to cluster keypoints by an object they
* belong to.
*/
public int class_id;
// javadoc:KeyPoint::KeyPoint(x,y,_size,_angle,_response,_octave,_class_id)
public KeyPoint(float x, float y, float _size, float _angle, float _response, int _octave, int _class_id)
{
pt = new Point(x, y);
size = _size;
angle = _angle;
response = _response;
octave = _octave;
class_id = _class_id;
}
// javadoc: KeyPoint::KeyPoint()
public KeyPoint()
{
this(0, 0, 0, -1, 0, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle, _response, _octave)
public KeyPoint(float x, float y, float _size, float _angle, float _response, int _octave)
{
this(x, y, _size, _angle, _response, _octave, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle, _response)
public KeyPoint(float x, float y, float _size, float _angle, float _response)
{
this(x, y, _size, _angle, _response, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle)
public KeyPoint(float x, float y, float _size, float _angle)
{
this(x, y, _size, _angle, 0, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size)
public KeyPoint(float x, float y, float _size)
{
this(x, y, _size, -1, 0, 0, -1);
}
@Override
public String toString() {
return "KeyPoint [pt=" + pt + ", size=" + size + ", angle=" + angle
+ ", response=" + response + ", octave=" + octave
+ ", class_id=" + class_id + "]";
}
}

File diff suppressed because it is too large

@@ -1,79 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfByte extends Mat {
// 8UC(x)
private static final int _depth = CvType.CV_8U;
private static final int _channels = 1;
public MatOfByte() {
super();
}
protected MatOfByte(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfByte fromNativeAddr(long addr) {
return new MatOfByte(addr);
}
public MatOfByte(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfByte(byte...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(byte...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public byte[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
byte[] a = new byte[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Byte> lb) {
if(lb==null || lb.size()==0)
return;
Byte ab[] = lb.toArray(new Byte[0]);
byte a[] = new byte[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Byte> toList() {
byte[] a = toArray();
Byte ab[] = new Byte[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}
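MatOfByte is most often used as the output buffer of Imgcodecs.imencode(); a hedged fragment (assumes the native library is loaded, org.opencv.imgcodecs.Imgcodecs is on the classpath, and `frame` is a valid Mat supplied by the caller):

MatOfByte buf = new MatOfByte();
Imgcodecs.imencode(".jpg", frame, buf);   // compress the frame to JPEG in memory
byte[] jpegBytes = buf.toArray();         // plain byte[] for network or storage
buf.release();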

@@ -1,83 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
import org.opencv.core.DMatch;
public class MatOfDMatch extends Mat {
// 32FC4
private static final int _depth = CvType.CV_32F;
private static final int _channels = 4;
public MatOfDMatch() {
super();
}
protected MatOfDMatch(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat: " + toString());
//FIXME: do we need release() here?
}
public static MatOfDMatch fromNativeAddr(long addr) {
return new MatOfDMatch(addr);
}
public MatOfDMatch(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat: " + toString());
//FIXME: do we need release() here?
}
public MatOfDMatch(DMatch...ap) {
super();
fromArray(ap);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(DMatch...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
float buff[] = new float[num * _channels];
for(int i=0; i<num; i++) {
DMatch m = a[i];
buff[_channels*i+0] = m.queryIdx;
buff[_channels*i+1] = m.trainIdx;
buff[_channels*i+2] = m.imgIdx;
buff[_channels*i+3] = m.distance;
}
put(0, 0, buff); //TODO: check ret val!
}
public DMatch[] toArray() {
int num = (int) total();
DMatch[] a = new DMatch[num];
if(num == 0)
return a;
float buff[] = new float[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
a[i] = new DMatch((int) buff[_channels*i+0], (int) buff[_channels*i+1], (int) buff[_channels*i+2], buff[_channels*i+3]);
return a;
}
public void fromList(List<DMatch> ldm) {
DMatch adm[] = ldm.toArray(new DMatch[0]);
fromArray(adm);
}
public List<DMatch> toList() {
DMatch[] adm = toArray();
return Arrays.asList(adm);
}
}

@@ -1,79 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfDouble extends Mat {
// 64FC(x)
private static final int _depth = CvType.CV_64F;
private static final int _channels = 1;
public MatOfDouble() {
super();
}
protected MatOfDouble(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfDouble fromNativeAddr(long addr) {
return new MatOfDouble(addr);
}
public MatOfDouble(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfDouble(double...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(double...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public double[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
double[] a = new double[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Double> lb) {
if(lb==null || lb.size()==0)
return;
Double ab[] = lb.toArray(new Double[0]);
double a[] = new double[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Double> toList() {
double[] a = toArray();
Double ab[] = new Double[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@@ -1,79 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfFloat extends Mat {
// 32FC1
private static final int _depth = CvType.CV_32F;
private static final int _channels = 1;
public MatOfFloat() {
super();
}
protected MatOfFloat(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfFloat fromNativeAddr(long addr) {
return new MatOfFloat(addr);
}
public MatOfFloat(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfFloat(float...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(float...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public float[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
float[] a = new float[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Float> lb) {
if(lb==null || lb.size()==0)
return;
Float ab[] = lb.toArray(new Float[0]);
float a[] = new float[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Float> toList() {
float[] a = toArray();
Float ab[] = new Float[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@@ -1,79 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfFloat4 extends Mat {
// 32FC4
private static final int _depth = CvType.CV_32F;
private static final int _channels = 4;
public MatOfFloat4() {
super();
}
protected MatOfFloat4(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfFloat4 fromNativeAddr(long addr) {
return new MatOfFloat4(addr);
}
public MatOfFloat4(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfFloat4(float...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(float...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public float[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
float[] a = new float[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Float> lb) {
if(lb==null || lb.size()==0)
return;
Float ab[] = lb.toArray(new Float[0]);
float a[] = new float[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Float> toList() {
float[] a = toArray();
Float ab[] = new Float[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@@ -1,79 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfFloat6 extends Mat {
// 32FC6
private static final int _depth = CvType.CV_32F;
private static final int _channels = 6;
public MatOfFloat6() {
super();
}
protected MatOfFloat6(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfFloat6 fromNativeAddr(long addr) {
return new MatOfFloat6(addr);
}
public MatOfFloat6(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfFloat6(float...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(float...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public float[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
float[] a = new float[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Float> lb) {
if(lb==null || lb.size()==0)
return;
Float ab[] = lb.toArray(new Float[0]);
float a[] = new float[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Float> toList() {
float[] a = toArray();
Float ab[] = new Float[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@@ -1,80 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfInt extends Mat {
// 32SC1
private static final int _depth = CvType.CV_32S;
private static final int _channels = 1;
public MatOfInt() {
super();
}
protected MatOfInt(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfInt fromNativeAddr(long addr) {
return new MatOfInt(addr);
}
public MatOfInt(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfInt(int...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(int...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public int[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
int[] a = new int[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Integer> lb) {
if(lb==null || lb.size()==0)
return;
Integer ab[] = lb.toArray(new Integer[0]);
int a[] = new int[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Integer> toList() {
int[] a = toArray();
Integer ab[] = new Integer[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@@ -1,80 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfInt4 extends Mat {
// 32SC4
private static final int _depth = CvType.CV_32S;
private static final int _channels = 4;
public MatOfInt4() {
super();
}
protected MatOfInt4(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfInt4 fromNativeAddr(long addr) {
return new MatOfInt4(addr);
}
public MatOfInt4(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfInt4(int...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(int...a) {
if(a==null || a.length==0)
return;
int num = a.length / _channels;
alloc(num);
put(0, 0, a); //TODO: check ret val!
}
public int[] toArray() {
int num = checkVector(_channels, _depth);
if(num < 0)
throw new RuntimeException("Native Mat has unexpected type or size: " + toString());
int[] a = new int[num * _channels];
if(num == 0)
return a;
get(0, 0, a); //TODO: check ret val!
return a;
}
public void fromList(List<Integer> lb) {
if(lb==null || lb.size()==0)
return;
Integer ab[] = lb.toArray(new Integer[0]);
int a[] = new int[ab.length];
for(int i=0; i<ab.length; i++)
a[i] = ab[i];
fromArray(a);
}
public List<Integer> toList() {
int[] a = toArray();
Integer ab[] = new Integer[a.length];
for(int i=0; i<a.length; i++)
ab[i] = a[i];
return Arrays.asList(ab);
}
}

@@ -1,86 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
import org.opencv.core.KeyPoint;
public class MatOfKeyPoint extends Mat {
// 32FC7
private static final int _depth = CvType.CV_32F;
private static final int _channels = 7;
public MatOfKeyPoint() {
super();
}
protected MatOfKeyPoint(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfKeyPoint fromNativeAddr(long addr) {
return new MatOfKeyPoint(addr);
}
public MatOfKeyPoint(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfKeyPoint(KeyPoint...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(KeyPoint...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
float buff[] = new float[num * _channels];
for(int i=0; i<num; i++) {
KeyPoint kp = a[i];
buff[_channels*i+0] = (float) kp.pt.x;
buff[_channels*i+1] = (float) kp.pt.y;
buff[_channels*i+2] = kp.size;
buff[_channels*i+3] = kp.angle;
buff[_channels*i+4] = kp.response;
buff[_channels*i+5] = kp.octave;
buff[_channels*i+6] = kp.class_id;
}
put(0, 0, buff); //TODO: check ret val!
}
public KeyPoint[] toArray() {
int num = (int) total();
KeyPoint[] a = new KeyPoint[num];
if(num == 0)
return a;
float buff[] = new float[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
a[i] = new KeyPoint( buff[_channels*i+0], buff[_channels*i+1], buff[_channels*i+2], buff[_channels*i+3],
buff[_channels*i+4], (int) buff[_channels*i+5], (int) buff[_channels*i+6] );
return a;
}
public void fromList(List<KeyPoint> lkp) {
KeyPoint akp[] = lkp.toArray(new KeyPoint[0]);
fromArray(akp);
}
public List<KeyPoint> toList() {
KeyPoint[] akp = toArray();
return Arrays.asList(akp);
}
}

@@ -1,78 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfPoint extends Mat {
// 32SC2
private static final int _depth = CvType.CV_32S;
private static final int _channels = 2;
public MatOfPoint() {
super();
}
protected MatOfPoint(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfPoint fromNativeAddr(long addr) {
return new MatOfPoint(addr);
}
public MatOfPoint(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfPoint(Point...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(Point...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
int buff[] = new int[num * _channels];
for(int i=0; i<num; i++) {
Point p = a[i];
buff[_channels*i+0] = (int) p.x;
buff[_channels*i+1] = (int) p.y;
}
put(0, 0, buff); //TODO: check ret val!
}
public Point[] toArray() {
int num = (int) total();
Point[] ap = new Point[num];
if(num == 0)
return ap;
int buff[] = new int[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
ap[i] = new Point(buff[i*_channels], buff[i*_channels+1]);
return ap;
}
public void fromList(List<Point> lp) {
Point ap[] = lp.toArray(new Point[0]);
fromArray(ap);
}
public List<Point> toList() {
Point[] ap = toArray();
return Arrays.asList(ap);
}
}

@@ -1,78 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfPoint2f extends Mat {
// 32FC2
private static final int _depth = CvType.CV_32F;
private static final int _channels = 2;
public MatOfPoint2f() {
super();
}
protected MatOfPoint2f(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfPoint2f fromNativeAddr(long addr) {
return new MatOfPoint2f(addr);
}
public MatOfPoint2f(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfPoint2f(Point...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(Point...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
float buff[] = new float[num * _channels];
for(int i=0; i<num; i++) {
Point p = a[i];
buff[_channels*i+0] = (float) p.x;
buff[_channels*i+1] = (float) p.y;
}
put(0, 0, buff); //TODO: check ret val!
}
public Point[] toArray() {
int num = (int) total();
Point[] ap = new Point[num];
if(num == 0)
return ap;
float buff[] = new float[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
ap[i] = new Point(buff[i*_channels], buff[i*_channels+1]);
return ap;
}
public void fromList(List<Point> lp) {
Point ap[] = lp.toArray(new Point[0]);
fromArray(ap);
}
public List<Point> toList() {
Point[] ap = toArray();
return Arrays.asList(ap);
}
}

@@ -1,79 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfPoint3 extends Mat {
// 32SC3
private static final int _depth = CvType.CV_32S;
private static final int _channels = 3;
public MatOfPoint3() {
super();
}
protected MatOfPoint3(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfPoint3 fromNativeAddr(long addr) {
return new MatOfPoint3(addr);
}
public MatOfPoint3(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfPoint3(Point3...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(Point3...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
int buff[] = new int[num * _channels];
for(int i=0; i<num; i++) {
Point3 p = a[i];
buff[_channels*i+0] = (int) p.x;
buff[_channels*i+1] = (int) p.y;
buff[_channels*i+2] = (int) p.z;
}
put(0, 0, buff); //TODO: check ret val!
}
public Point3[] toArray() {
int num = (int) total();
Point3[] ap = new Point3[num];
if(num == 0)
return ap;
int buff[] = new int[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
ap[i] = new Point3(buff[i*_channels], buff[i*_channels+1], buff[i*_channels+2]);
return ap;
}
public void fromList(List<Point3> lp) {
Point3 ap[] = lp.toArray(new Point3[0]);
fromArray(ap);
}
public List<Point3> toList() {
Point3[] ap = toArray();
return Arrays.asList(ap);
}
}

@@ -1,79 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfPoint3f extends Mat {
// 32FC3
private static final int _depth = CvType.CV_32F;
private static final int _channels = 3;
public MatOfPoint3f() {
super();
}
protected MatOfPoint3f(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfPoint3f fromNativeAddr(long addr) {
return new MatOfPoint3f(addr);
}
public MatOfPoint3f(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfPoint3f(Point3...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(Point3...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
float buff[] = new float[num * _channels];
for(int i=0; i<num; i++) {
Point3 p = a[i];
buff[_channels*i+0] = (float) p.x;
buff[_channels*i+1] = (float) p.y;
buff[_channels*i+2] = (float) p.z;
}
put(0, 0, buff); //TODO: check ret val!
}
public Point3[] toArray() {
int num = (int) total();
Point3[] ap = new Point3[num];
if(num == 0)
return ap;
float buff[] = new float[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
ap[i] = new Point3(buff[i*_channels], buff[i*_channels+1], buff[i*_channels+2]);
return ap;
}
public void fromList(List<Point3> lp) {
Point3 ap[] = lp.toArray(new Point3[0]);
fromArray(ap);
}
public List<Point3> toList() {
Point3[] ap = toArray();
return Arrays.asList(ap);
}
}

@@ -1,81 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfRect extends Mat {
// 32SC4
private static final int _depth = CvType.CV_32S;
private static final int _channels = 4;
public MatOfRect() {
super();
}
protected MatOfRect(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfRect fromNativeAddr(long addr) {
return new MatOfRect(addr);
}
public MatOfRect(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfRect(Rect...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(Rect...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
int buff[] = new int[num * _channels];
for(int i=0; i<num; i++) {
Rect r = a[i];
buff[_channels*i+0] = (int) r.x;
buff[_channels*i+1] = (int) r.y;
buff[_channels*i+2] = (int) r.width;
buff[_channels*i+3] = (int) r.height;
}
put(0, 0, buff); //TODO: check ret val!
}
public Rect[] toArray() {
int num = (int) total();
Rect[] a = new Rect[num];
if(num == 0)
return a;
int buff[] = new int[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
a[i] = new Rect(buff[i*_channels], buff[i*_channels+1], buff[i*_channels+2], buff[i*_channels+3]);
return a;
}
public void fromList(List<Rect> lr) {
Rect ap[] = lr.toArray(new Rect[0]);
fromArray(ap);
}
public List<Rect> toList() {
Rect[] ar = toArray();
return Arrays.asList(ar);
}
}

@@ -1,81 +0,0 @@
package org.opencv.core;
import java.util.Arrays;
import java.util.List;
public class MatOfRect2d extends Mat {
// 64FC4
private static final int _depth = CvType.CV_64F;
private static final int _channels = 4;
public MatOfRect2d() {
super();
}
protected MatOfRect2d(long addr) {
super(addr);
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public static MatOfRect2d fromNativeAddr(long addr) {
return new MatOfRect2d(addr);
}
public MatOfRect2d(Mat m) {
super(m, Range.all());
if( !empty() && checkVector(_channels, _depth) < 0 )
throw new IllegalArgumentException("Incompatible Mat");
//FIXME: do we need release() here?
}
public MatOfRect2d(Rect2d...a) {
super();
fromArray(a);
}
public void alloc(int elemNumber) {
if(elemNumber>0)
super.create(elemNumber, 1, CvType.makeType(_depth, _channels));
}
public void fromArray(Rect2d...a) {
if(a==null || a.length==0)
return;
int num = a.length;
alloc(num);
double buff[] = new double[num * _channels];
for(int i=0; i<num; i++) {
Rect2d r = a[i];
buff[_channels*i+0] = (double) r.x;
buff[_channels*i+1] = (double) r.y;
buff[_channels*i+2] = (double) r.width;
buff[_channels*i+3] = (double) r.height;
}
put(0, 0, buff); //TODO: check ret val!
}
public Rect2d[] toArray() {
int num = (int) total();
Rect2d[] a = new Rect2d[num];
if(num == 0)
return a;
double buff[] = new double[num * _channels];
get(0, 0, buff); //TODO: check ret val!
for(int i=0; i<num; i++)
a[i] = new Rect2d(buff[i*_channels], buff[i*_channels+1], buff[i*_channels+2], buff[i*_channels+3]);
return a;
}
public void fromList(List<Rect2d> lr) {
Rect2d ap[] = lr.toArray(new Rect2d[0]);
fromArray(ap);
}
public List<Rect2d> toList() {
Rect2d[] ar = toArray();
return Arrays.asList(ar);
}
}

@ -1,68 +0,0 @@
package org.opencv.core;
//javadoc:Point_
public class Point {
public double x, y;
public Point(double x, double y) {
this.x = x;
this.y = y;
}
public Point() {
this(0, 0);
}
public Point(double[] vals) {
this();
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
x = vals.length > 0 ? vals[0] : 0;
y = vals.length > 1 ? vals[1] : 0;
} else {
x = 0;
y = 0;
}
}
public Point clone() {
return new Point(x, y);
}
public double dot(Point p) {
return x * p.x + y * p.y;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(x);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(y);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof Point)) return false;
Point it = (Point) obj;
return x == it.x && y == it.y;
}
public boolean inside(Rect r) {
return r.contains(this);
}
@Override
public String toString() {
return "{" + x + ", " + y + "}";
}
}

@ -1,79 +0,0 @@
package org.opencv.core;
//javadoc:Point3_
public class Point3 {
public double x, y, z;
public Point3(double x, double y, double z) {
this.x = x;
this.y = y;
this.z = z;
}
public Point3() {
this(0, 0, 0);
}
public Point3(Point p) {
x = p.x;
y = p.y;
z = 0;
}
public Point3(double[] vals) {
this();
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
x = vals.length > 0 ? vals[0] : 0;
y = vals.length > 1 ? vals[1] : 0;
z = vals.length > 2 ? vals[2] : 0;
} else {
x = 0;
y = 0;
z = 0;
}
}
public Point3 clone() {
return new Point3(x, y, z);
}
public double dot(Point3 p) {
return x * p.x + y * p.y + z * p.z;
}
public Point3 cross(Point3 p) {
return new Point3(y * p.z - z * p.y, z * p.x - x * p.z, x * p.y - y * p.x);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(x);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(y);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(z);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof Point3)) return false;
Point3 it = (Point3) obj;
return x == it.x && y == it.y && z == it.z;
}
@Override
public String toString() {
return "{" + x + ", " + y + ", " + z + "}";
}
}
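A short sketch of the vector operations the removed Point3 class provided (dot and cross products), using only the methods defined above:

Point3 a = new Point3(1, 0, 0);
Point3 b = new Point3(0, 1, 0);
double d = a.dot(b);      // 0.0: the vectors are orthogonal
Point3 c = a.cross(b);    // {0.0, 0.0, 1.0}: right-handed cross product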

@ -1,82 +0,0 @@
package org.opencv.core;
//javadoc:Range
public class Range {
public int start, end;
public Range(int s, int e) {
this.start = s;
this.end = e;
}
public Range() {
this(0, 0);
}
public Range(double[] vals) {
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
start = vals.length > 0 ? (int) vals[0] : 0;
end = vals.length > 1 ? (int) vals[1] : 0;
} else {
start = 0;
end = 0;
}
}
public int size() {
return empty() ? 0 : end - start;
}
public boolean empty() {
return end <= start;
}
public static Range all() {
return new Range(Integer.MIN_VALUE, Integer.MAX_VALUE);
}
public Range intersection(Range r1) {
Range r = new Range(Math.max(r1.start, this.start), Math.min(r1.end, this.end));
r.end = Math.max(r.end, r.start);
return r;
}
public Range shift(int delta) {
return new Range(start + delta, end + delta);
}
public Range clone() {
return new Range(start, end);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(start);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(end);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof Range)) return false;
Range it = (Range) obj;
return start == it.start && end == it.end;
}
@Override
public String toString() {
return "[" + start + ", " + end + ")";
}
}
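A sketch of the half-open interval semantics of the removed Range class (end is exclusive, as the "[start, end)" toString form indicates):

Range r1 = new Range(2, 8);
Range r2 = new Range(5, 12);
Range both = r1.intersection(r2);        // [5, 8), size() == 3
Range moved = r1.shift(10);              // [12, 18)
boolean none = new Range(4, 4).empty();  // true: end <= start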

@ -1,104 +0,0 @@
package org.opencv.core;
//javadoc:Rect_
public class Rect {
public int x, y, width, height;
public Rect(int x, int y, int width, int height) {
this.x = x;
this.y = y;
this.width = width;
this.height = height;
}
public Rect() {
this(0, 0, 0, 0);
}
public Rect(Point p1, Point p2) {
x = (int) (p1.x < p2.x ? p1.x : p2.x);
y = (int) (p1.y < p2.y ? p1.y : p2.y);
width = (int) (p1.x > p2.x ? p1.x : p2.x) - x;
height = (int) (p1.y > p2.y ? p1.y : p2.y) - y;
}
public Rect(Point p, Size s) {
this((int) p.x, (int) p.y, (int) s.width, (int) s.height);
}
public Rect(double[] vals) {
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
x = vals.length > 0 ? (int) vals[0] : 0;
y = vals.length > 1 ? (int) vals[1] : 0;
width = vals.length > 2 ? (int) vals[2] : 0;
height = vals.length > 3 ? (int) vals[3] : 0;
} else {
x = 0;
y = 0;
width = 0;
height = 0;
}
}
public Rect clone() {
return new Rect(x, y, width, height);
}
public Point tl() {
return new Point(x, y);
}
public Point br() {
return new Point(x + width, y + height);
}
public Size size() {
return new Size(width, height);
}
public double area() {
return width * height;
}
public boolean empty() {
return width <= 0 || height <= 0;
}
public boolean contains(Point p) {
return x <= p.x && p.x < x + width && y <= p.y && p.y < y + height;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(height);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(width);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(x);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(y);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof Rect)) return false;
Rect it = (Rect) obj;
return x == it.x && y == it.y && width == it.width && height == it.height;
}
@Override
public String toString() {
return "{" + x + ", " + y + ", " + width + "x" + height + "}";
}
}
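A short sketch of the removed Rect helpers; note that contains() treats the right and bottom edges as exclusive, matching the checks above:

Rect r = new Rect(new Point(10, 10), new Size(30, 20));
Point tl = r.tl();                            // {10.0, 10.0}
Point br = r.br();                            // {40.0, 30.0}
boolean in  = r.contains(new Point(39, 29));  // true
boolean out = r.contains(br);                 // false: br() itself lies outside
double area = r.area();                       // 600.0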

@ -1,104 +0,0 @@
package org.opencv.core;
//javadoc:Rect2d_
public class Rect2d {
public double x, y, width, height;
public Rect2d(double x, double y, double width, double height) {
this.x = x;
this.y = y;
this.width = width;
this.height = height;
}
public Rect2d() {
this(0, 0, 0, 0);
}
public Rect2d(Point p1, Point p2) {
x = (double) (p1.x < p2.x ? p1.x : p2.x);
y = (double) (p1.y < p2.y ? p1.y : p2.y);
width = (double) (p1.x > p2.x ? p1.x : p2.x) - x;
height = (double) (p1.y > p2.y ? p1.y : p2.y) - y;
}
public Rect2d(Point p, Size s) {
this((double) p.x, (double) p.y, (double) s.width, (double) s.height);
}
public Rect2d(double[] vals) {
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
x = vals.length > 0 ? (double) vals[0] : 0;
y = vals.length > 1 ? (double) vals[1] : 0;
width = vals.length > 2 ? (double) vals[2] : 0;
height = vals.length > 3 ? (double) vals[3] : 0;
} else {
x = 0;
y = 0;
width = 0;
height = 0;
}
}
public Rect2d clone() {
return new Rect2d(x, y, width, height);
}
public Point tl() {
return new Point(x, y);
}
public Point br() {
return new Point(x + width, y + height);
}
public Size size() {
return new Size(width, height);
}
public double area() {
return width * height;
}
public boolean empty() {
return width <= 0 || height <= 0;
}
public boolean contains(Point p) {
return x <= p.x && p.x < x + width && y <= p.y && p.y < y + height;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(height);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(width);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(x);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(y);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof Rect2d)) return false;
Rect2d it = (Rect2d) obj;
return x == it.x && y == it.y && width == it.width && height == it.height;
}
@Override
public String toString() {
return "{" + x + ", " + y + ", " + width + "x" + height + "}";
}
}

@ -1,113 +0,0 @@
package org.opencv.core;
//javadoc:RotatedRect_
public class RotatedRect {
public Point center;
public Size size;
public double angle;
public RotatedRect() {
this.center = new Point();
this.size = new Size();
this.angle = 0;
}
public RotatedRect(Point c, Size s, double a) {
this.center = c.clone();
this.size = s.clone();
this.angle = a;
}
public RotatedRect(double[] vals) {
this();
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
center.x = vals.length > 0 ? (double) vals[0] : 0;
center.y = vals.length > 1 ? (double) vals[1] : 0;
size.width = vals.length > 2 ? (double) vals[2] : 0;
size.height = vals.length > 3 ? (double) vals[3] : 0;
angle = vals.length > 4 ? (double) vals[4] : 0;
} else {
center.x = 0;
center.y = 0;
size.width = 0;
size.height = 0;
angle = 0;
}
}
public void points(Point pt[])
{
double _angle = angle * Math.PI / 180.0;
double b = (double) Math.cos(_angle) * 0.5f;
double a = (double) Math.sin(_angle) * 0.5f;
pt[0] = new Point(
center.x - a * size.height - b * size.width,
center.y + b * size.height - a * size.width);
pt[1] = new Point(
center.x + a * size.height - b * size.width,
center.y - b * size.height - a * size.width);
pt[2] = new Point(
2 * center.x - pt[0].x,
2 * center.y - pt[0].y);
pt[3] = new Point(
2 * center.x - pt[1].x,
2 * center.y - pt[1].y);
}
public Rect boundingRect()
{
Point pt[] = new Point[4];
points(pt);
Rect r = new Rect((int) Math.floor(Math.min(Math.min(Math.min(pt[0].x, pt[1].x), pt[2].x), pt[3].x)),
(int) Math.floor(Math.min(Math.min(Math.min(pt[0].y, pt[1].y), pt[2].y), pt[3].y)),
(int) Math.ceil(Math.max(Math.max(Math.max(pt[0].x, pt[1].x), pt[2].x), pt[3].x)),
(int) Math.ceil(Math.max(Math.max(Math.max(pt[0].y, pt[1].y), pt[2].y), pt[3].y)));
r.width -= r.x - 1;
r.height -= r.y - 1;
return r;
}
public RotatedRect clone() {
return new RotatedRect(center, size, angle);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(center.x);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(center.y);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(size.width);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(size.height);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(angle);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof RotatedRect)) return false;
RotatedRect it = (RotatedRect) obj;
return center.equals(it.center) && size.equals(it.size) && angle == it.angle;
}
@Override
public String toString() {
return "{ " + center + " " + size + " * " + angle + " }";
}
}
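A sketch showing how the removed RotatedRect exposes its four corners and an axis-aligned bounding box (the angle is in degrees and is converted to radians inside points()):

RotatedRect rr = new RotatedRect(new Point(50, 50), new Size(40, 20), 30.0);
Point[] corners = new Point[4];
rr.points(corners);             // fills the four corner points of the rotated box
Rect box = rr.boundingRect();   // integer rect enclosing all four corners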

@ -1,90 +0,0 @@
package org.opencv.core;
//javadoc:Scalar_
public class Scalar {
public double val[];
public Scalar(double v0, double v1, double v2, double v3) {
val = new double[] { v0, v1, v2, v3 };
}
public Scalar(double v0, double v1, double v2) {
val = new double[] { v0, v1, v2, 0 };
}
public Scalar(double v0, double v1) {
val = new double[] { v0, v1, 0, 0 };
}
public Scalar(double v0) {
val = new double[] { v0, 0, 0, 0 };
}
public Scalar(double[] vals) {
if (vals != null && vals.length == 4)
val = vals.clone();
else {
val = new double[4];
set(vals);
}
}
public void set(double[] vals) {
if (vals != null) {
val[0] = vals.length > 0 ? vals[0] : 0;
val[1] = vals.length > 1 ? vals[1] : 0;
val[2] = vals.length > 2 ? vals[2] : 0;
val[3] = vals.length > 3 ? vals[3] : 0;
} else
val[0] = val[1] = val[2] = val[3] = 0;
}
public static Scalar all(double v) {
return new Scalar(v, v, v, v);
}
public Scalar clone() {
return new Scalar(val);
}
public Scalar mul(Scalar it, double scale) {
return new Scalar(val[0] * it.val[0] * scale, val[1] * it.val[1] * scale,
val[2] * it.val[2] * scale, val[3] * it.val[3] * scale);
}
public Scalar mul(Scalar it) {
return mul(it, 1);
}
public Scalar conj() {
return new Scalar(val[0], -val[1], -val[2], -val[3]);
}
public boolean isReal() {
return val[1] == 0 && val[2] == 0 && val[3] == 0;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + java.util.Arrays.hashCode(val);
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof Scalar)) return false;
Scalar it = (Scalar) obj;
if (!java.util.Arrays.equals(val, it.val)) return false;
return true;
}
@Override
public String toString() {
return "[" + val[0] + ", " + val[1] + ", " + val[2] + ", " + val[3] + "]";
}
}
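A sketch of the removed Scalar class, which always stores four channel values and pads missing ones with zero:

Scalar bgr = new Scalar(255, 128, 0);        // stored as [255.0, 128.0, 0.0, 0.0]
Scalar half = bgr.mul(Scalar.all(1), 0.5);   // element-wise product, scaled by 0.5
boolean real = bgr.isReal();                 // false: channels 1..3 are not all zero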

@ -1,73 +0,0 @@
package org.opencv.core;
//javadoc:Size_
public class Size {
public double width, height;
public Size(double width, double height) {
this.width = width;
this.height = height;
}
public Size() {
this(0, 0);
}
public Size(Point p) {
width = p.x;
height = p.y;
}
public Size(double[] vals) {
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
width = vals.length > 0 ? vals[0] : 0;
height = vals.length > 1 ? vals[1] : 0;
} else {
width = 0;
height = 0;
}
}
public double area() {
return width * height;
}
public boolean empty() {
return width <= 0 || height <= 0;
}
public Size clone() {
return new Size(width, height);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(height);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(width);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof Size)) return false;
Size it = (Size) obj;
return width == it.width && height == it.height;
}
@Override
public String toString() {
return (int)width + "x" + (int)height;
}
}

@ -1,92 +0,0 @@
package org.opencv.core;
//javadoc:TermCriteria
public class TermCriteria {
/**
* The maximum number of iterations or elements to compute
*/
public static final int COUNT = 1;
/**
* The maximum number of iterations or elements to compute
*/
public static final int MAX_ITER = COUNT;
/**
* The desired accuracy threshold or change in parameters at which the iterative algorithm is terminated.
*/
public static final int EPS = 2;
public int type;
public int maxCount;
public double epsilon;
/**
* Termination criteria for iterative algorithms.
*
* @param type
* the type of termination criteria: COUNT, EPS or COUNT + EPS.
* @param maxCount
* the maximum number of iterations/elements.
* @param epsilon
* the desired accuracy.
*/
public TermCriteria(int type, int maxCount, double epsilon) {
this.type = type;
this.maxCount = maxCount;
this.epsilon = epsilon;
}
/**
* Termination criteria for iterative algorithms.
*/
public TermCriteria() {
this(0, 0, 0.0);
}
public TermCriteria(double[] vals) {
set(vals);
}
public void set(double[] vals) {
if (vals != null) {
type = vals.length > 0 ? (int) vals[0] : 0;
maxCount = vals.length > 1 ? (int) vals[1] : 0;
epsilon = vals.length > 2 ? (double) vals[2] : 0;
} else {
type = 0;
maxCount = 0;
epsilon = 0;
}
}
public TermCriteria clone() {
return new TermCriteria(type, maxCount, epsilon);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
long temp;
temp = Double.doubleToLongBits(type);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(maxCount);
result = prime * result + (int) (temp ^ (temp >>> 32));
temp = Double.doubleToLongBits(epsilon);
result = prime * result + (int) (temp ^ (temp >>> 32));
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (!(obj instanceof TermCriteria)) return false;
TermCriteria it = (TermCriteria) obj;
return type == it.type && maxCount == it.maxCount && epsilon == it.epsilon;
}
@Override
public String toString() {
return "{ type: " + type + ", maxCount: " + maxCount + ", epsilon: " + epsilon + "}";
}
}
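A sketch of how the removed TermCriteria was typically constructed for iterative algorithms, combining an iteration cap with an accuracy threshold:

// Stop after 100 iterations or once the per-iteration change drops below 1e-3,
// whichever happens first (COUNT + EPS == 3).
TermCriteria crit = new TermCriteria(TermCriteria.COUNT + TermCriteria.EPS, 100, 1e-3);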

@ -1,182 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.core;
// C++: class TickMeter
//javadoc: TickMeter
public class TickMeter {
protected final long nativeObj;
protected TickMeter(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
//
// C++: TickMeter()
//
//javadoc: TickMeter::TickMeter()
public TickMeter()
{
nativeObj = TickMeter_0();
return;
}
//
// C++: double getTimeMicro()
//
//javadoc: TickMeter::getTimeMicro()
public double getTimeMicro()
{
double retVal = getTimeMicro_0(nativeObj);
return retVal;
}
//
// C++: double getTimeMilli()
//
//javadoc: TickMeter::getTimeMilli()
public double getTimeMilli()
{
double retVal = getTimeMilli_0(nativeObj);
return retVal;
}
//
// C++: double getTimeSec()
//
//javadoc: TickMeter::getTimeSec()
public double getTimeSec()
{
double retVal = getTimeSec_0(nativeObj);
return retVal;
}
//
// C++: int64 getCounter()
//
//javadoc: TickMeter::getCounter()
public long getCounter()
{
long retVal = getCounter_0(nativeObj);
return retVal;
}
//
// C++: int64 getTimeTicks()
//
//javadoc: TickMeter::getTimeTicks()
public long getTimeTicks()
{
long retVal = getTimeTicks_0(nativeObj);
return retVal;
}
//
// C++: void reset()
//
//javadoc: TickMeter::reset()
public void reset()
{
reset_0(nativeObj);
return;
}
//
// C++: void start()
//
//javadoc: TickMeter::start()
public void start()
{
start_0(nativeObj);
return;
}
//
// C++: void stop()
//
//javadoc: TickMeter::stop()
public void stop()
{
stop_0(nativeObj);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: TickMeter()
private static native long TickMeter_0();
// C++: double getTimeMicro()
private static native double getTimeMicro_0(long nativeObj);
// C++: double getTimeMilli()
private static native double getTimeMilli_0(long nativeObj);
// C++: double getTimeSec()
private static native double getTimeSec_0(long nativeObj);
// C++: int64 getCounter()
private static native long getCounter_0(long nativeObj);
// C++: int64 getTimeTicks()
private static native long getTimeTicks_0(long nativeObj);
// C++: void reset()
private static native void reset_0(long nativeObj);
// C++: void start()
private static native void start_0(long nativeObj);
// C++: void stop()
private static native void stop_0(long nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}
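A sketch of the removed TickMeter timing wrapper; the object delegates to native code through the *_0 bindings listed above:

TickMeter tm = new TickMeter();
tm.start();
// ... work being timed ...
tm.stop();
double ms = tm.getTimeMilli();   // total elapsed time in milliseconds
long runs = tm.getCounter();     // number of completed start/stop cycles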

@ -1,212 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.dnn;
import java.lang.String;
// C++: class DictValue
//javadoc: DictValue
public class DictValue {
protected final long nativeObj;
protected DictValue(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
//
// C++: DictValue(String s)
//
//javadoc: DictValue::DictValue(s)
public DictValue(String s)
{
nativeObj = DictValue_0(s);
return;
}
//
// C++: DictValue(double p)
//
//javadoc: DictValue::DictValue(p)
public DictValue(double p)
{
nativeObj = DictValue_1(p);
return;
}
//
// C++: DictValue(int i)
//
//javadoc: DictValue::DictValue(i)
public DictValue(int i)
{
nativeObj = DictValue_2(i);
return;
}
//
// C++: String getStringValue(int idx = -1)
//
//javadoc: DictValue::getStringValue(idx)
public String getStringValue(int idx)
{
String retVal = getStringValue_0(nativeObj, idx);
return retVal;
}
//javadoc: DictValue::getStringValue()
public String getStringValue()
{
String retVal = getStringValue_1(nativeObj);
return retVal;
}
//
// C++: bool isInt()
//
//javadoc: DictValue::isInt()
public boolean isInt()
{
boolean retVal = isInt_0(nativeObj);
return retVal;
}
//
// C++: bool isReal()
//
//javadoc: DictValue::isReal()
public boolean isReal()
{
boolean retVal = isReal_0(nativeObj);
return retVal;
}
//
// C++: bool isString()
//
//javadoc: DictValue::isString()
public boolean isString()
{
boolean retVal = isString_0(nativeObj);
return retVal;
}
//
// C++: double getRealValue(int idx = -1)
//
//javadoc: DictValue::getRealValue(idx)
public double getRealValue(int idx)
{
double retVal = getRealValue_0(nativeObj, idx);
return retVal;
}
//javadoc: DictValue::getRealValue()
public double getRealValue()
{
double retVal = getRealValue_1(nativeObj);
return retVal;
}
//
// C++: int getIntValue(int idx = -1)
//
//javadoc: DictValue::getIntValue(idx)
public int getIntValue(int idx)
{
int retVal = getIntValue_0(nativeObj, idx);
return retVal;
}
//javadoc: DictValue::getIntValue()
public int getIntValue()
{
int retVal = getIntValue_1(nativeObj);
return retVal;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: DictValue(String s)
private static native long DictValue_0(String s);
// C++: DictValue(double p)
private static native long DictValue_1(double p);
// C++: DictValue(int i)
private static native long DictValue_2(int i);
// C++: String getStringValue(int idx = -1)
private static native String getStringValue_0(long nativeObj, int idx);
private static native String getStringValue_1(long nativeObj);
// C++: bool isInt()
private static native boolean isInt_0(long nativeObj);
// C++: bool isReal()
private static native boolean isReal_0(long nativeObj);
// C++: bool isString()
private static native boolean isString_0(long nativeObj);
// C++: double getRealValue(int idx = -1)
private static native double getRealValue_0(long nativeObj, int idx);
private static native double getRealValue_1(long nativeObj);
// C++: int getIntValue(int idx = -1)
private static native int getIntValue_0(long nativeObj, int idx);
private static native int getIntValue_1(long nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}
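A sketch of how the removed DictValue was used as a layer identifier: it wraps either a string, an integer or a real value and reports which one it holds:

DictValue byName = new DictValue("conv1");
DictValue byIndex = new DictValue(3);
boolean isName = byName.isString();   // true
int idx = byIndex.getIntValue();      // 3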

@ -1,279 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.dnn;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfRect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.utils.Converters;
// C++: class Dnn
//javadoc: Dnn
public class Dnn {
public static final int
DNN_BACKEND_DEFAULT = 0,
DNN_BACKEND_HALIDE = 1,
DNN_TARGET_CPU = 0,
DNN_TARGET_OPENCL = 1;
//
// C++: Mat blobFromImage(Mat image, double scalefactor = 1.0, Size size = Size(), Scalar mean = Scalar(), bool swapRB = true, bool crop = true)
//
//javadoc: blobFromImage(image, scalefactor, size, mean, swapRB, crop)
public static Mat blobFromImage(Mat image, double scalefactor, Size size, Scalar mean, boolean swapRB, boolean crop)
{
Mat retVal = new Mat(blobFromImage_0(image.nativeObj, scalefactor, size.width, size.height, mean.val[0], mean.val[1], mean.val[2], mean.val[3], swapRB, crop));
return retVal;
}
//javadoc: blobFromImage(image)
public static Mat blobFromImage(Mat image)
{
Mat retVal = new Mat(blobFromImage_1(image.nativeObj));
return retVal;
}
//
// C++: Mat blobFromImages(vector_Mat images, double scalefactor = 1.0, Size size = Size(), Scalar mean = Scalar(), bool swapRB = true, bool crop = true)
//
//javadoc: blobFromImages(images, scalefactor, size, mean, swapRB, crop)
public static Mat blobFromImages(List<Mat> images, double scalefactor, Size size, Scalar mean, boolean swapRB, boolean crop)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
Mat retVal = new Mat(blobFromImages_0(images_mat.nativeObj, scalefactor, size.width, size.height, mean.val[0], mean.val[1], mean.val[2], mean.val[3], swapRB, crop));
return retVal;
}
//javadoc: blobFromImages(images)
public static Mat blobFromImages(List<Mat> images)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
Mat retVal = new Mat(blobFromImages_1(images_mat.nativeObj));
return retVal;
}
//
// C++: Mat readTorchBlob(String filename, bool isBinary = true)
//
//javadoc: readTorchBlob(filename, isBinary)
public static Mat readTorchBlob(String filename, boolean isBinary)
{
Mat retVal = new Mat(readTorchBlob_0(filename, isBinary));
return retVal;
}
//javadoc: readTorchBlob(filename)
public static Mat readTorchBlob(String filename)
{
Mat retVal = new Mat(readTorchBlob_1(filename));
return retVal;
}
//
// C++: Net readNetFromCaffe(String prototxt, String caffeModel = String())
//
//javadoc: readNetFromCaffe(prototxt, caffeModel)
public static Net readNetFromCaffe(String prototxt, String caffeModel)
{
Net retVal = new Net(readNetFromCaffe_0(prototxt, caffeModel));
return retVal;
}
//javadoc: readNetFromCaffe(prototxt)
public static Net readNetFromCaffe(String prototxt)
{
Net retVal = new Net(readNetFromCaffe_1(prototxt));
return retVal;
}
//
// C++: Net readNetFromDarknet(String cfgFile, String darknetModel = String())
//
//javadoc: readNetFromDarknet(cfgFile, darknetModel)
public static Net readNetFromDarknet(String cfgFile, String darknetModel)
{
Net retVal = new Net(readNetFromDarknet_0(cfgFile, darknetModel));
return retVal;
}
//javadoc: readNetFromDarknet(cfgFile)
public static Net readNetFromDarknet(String cfgFile)
{
Net retVal = new Net(readNetFromDarknet_1(cfgFile));
return retVal;
}
//
// C++: Net readNetFromTensorflow(String model, String config = String())
//
//javadoc: readNetFromTensorflow(model, config)
public static Net readNetFromTensorflow(String model, String config)
{
Net retVal = new Net(readNetFromTensorflow_0(model, config));
return retVal;
}
//javadoc: readNetFromTensorflow(model)
public static Net readNetFromTensorflow(String model)
{
Net retVal = new Net(readNetFromTensorflow_1(model));
return retVal;
}
//
// C++: Net readNetFromTorch(String model, bool isBinary = true)
//
//javadoc: readNetFromTorch(model, isBinary)
public static Net readNetFromTorch(String model, boolean isBinary)
{
Net retVal = new Net(readNetFromTorch_0(model, isBinary));
return retVal;
}
//javadoc: readNetFromTorch(model)
public static Net readNetFromTorch(String model)
{
Net retVal = new Net(readNetFromTorch_1(model));
return retVal;
}
//
// C++: void NMSBoxes(vector_Rect bboxes, vector_float scores, float score_threshold, float nms_threshold, vector_int& indices, float eta = 1.f, int top_k = 0)
//
//javadoc: NMSBoxes(bboxes, scores, score_threshold, nms_threshold, indices, eta, top_k)
public static void NMSBoxes(MatOfRect bboxes, MatOfFloat scores, float score_threshold, float nms_threshold, MatOfInt indices, float eta, int top_k)
{
Mat bboxes_mat = bboxes;
Mat scores_mat = scores;
Mat indices_mat = indices;
NMSBoxes_0(bboxes_mat.nativeObj, scores_mat.nativeObj, score_threshold, nms_threshold, indices_mat.nativeObj, eta, top_k);
return;
}
//javadoc: NMSBoxes(bboxes, scores, score_threshold, nms_threshold, indices)
public static void NMSBoxes(MatOfRect bboxes, MatOfFloat scores, float score_threshold, float nms_threshold, MatOfInt indices)
{
Mat bboxes_mat = bboxes;
Mat scores_mat = scores;
Mat indices_mat = indices;
NMSBoxes_1(bboxes_mat.nativeObj, scores_mat.nativeObj, score_threshold, nms_threshold, indices_mat.nativeObj);
return;
}
//
// C++: void shrinkCaffeModel(String src, String dst, vector_String layersTypes = std::vector<String>())
//
//javadoc: shrinkCaffeModel(src, dst, layersTypes)
public static void shrinkCaffeModel(String src, String dst, List<String> layersTypes)
{
shrinkCaffeModel_0(src, dst, layersTypes);
return;
}
//javadoc: shrinkCaffeModel(src, dst)
public static void shrinkCaffeModel(String src, String dst)
{
shrinkCaffeModel_1(src, dst);
return;
}
// C++: Mat blobFromImage(Mat image, double scalefactor = 1.0, Size size = Size(), Scalar mean = Scalar(), bool swapRB = true, bool crop = true)
private static native long blobFromImage_0(long image_nativeObj, double scalefactor, double size_width, double size_height, double mean_val0, double mean_val1, double mean_val2, double mean_val3, boolean swapRB, boolean crop);
private static native long blobFromImage_1(long image_nativeObj);
// C++: Mat blobFromImages(vector_Mat images, double scalefactor = 1.0, Size size = Size(), Scalar mean = Scalar(), bool swapRB = true, bool crop = true)
private static native long blobFromImages_0(long images_mat_nativeObj, double scalefactor, double size_width, double size_height, double mean_val0, double mean_val1, double mean_val2, double mean_val3, boolean swapRB, boolean crop);
private static native long blobFromImages_1(long images_mat_nativeObj);
// C++: Mat readTorchBlob(String filename, bool isBinary = true)
private static native long readTorchBlob_0(String filename, boolean isBinary);
private static native long readTorchBlob_1(String filename);
// C++: Net readNetFromCaffe(String prototxt, String caffeModel = String())
private static native long readNetFromCaffe_0(String prototxt, String caffeModel);
private static native long readNetFromCaffe_1(String prototxt);
// C++: Net readNetFromDarknet(String cfgFile, String darknetModel = String())
private static native long readNetFromDarknet_0(String cfgFile, String darknetModel);
private static native long readNetFromDarknet_1(String cfgFile);
// C++: Net readNetFromTensorflow(String model, String config = String())
private static native long readNetFromTensorflow_0(String model, String config);
private static native long readNetFromTensorflow_1(String model);
// C++: Net readNetFromTorch(String model, bool isBinary = true)
private static native long readNetFromTorch_0(String model, boolean isBinary);
private static native long readNetFromTorch_1(String model);
// C++: void NMSBoxes(vector_Rect bboxes, vector_float scores, float score_threshold, float nms_threshold, vector_int& indices, float eta = 1.f, int top_k = 0)
private static native void NMSBoxes_0(long bboxes_mat_nativeObj, long scores_mat_nativeObj, float score_threshold, float nms_threshold, long indices_mat_nativeObj, float eta, int top_k);
private static native void NMSBoxes_1(long bboxes_mat_nativeObj, long scores_mat_nativeObj, float score_threshold, float nms_threshold, long indices_mat_nativeObj);
// C++: void shrinkCaffeModel(String src, String dst, vector_String layersTypes = std::vector<String>())
private static native void shrinkCaffeModel_0(String src, String dst, List<String> layersTypes);
private static native void shrinkCaffeModel_1(String src, String dst);
}
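A minimal inference sketch built from the removed Dnn/Net entry points shown in this diff; the model and image file names are placeholders, and Imgcodecs.imread comes from org.opencv.imgcodecs, which is not part of this hunk:

Mat image = Imgcodecs.imread("input.jpg");   // BGR image loaded elsewhere (assumption)
Net net = Dnn.readNetFromCaffe("deploy.prototxt", "weights.caffemodel");
Mat blob = Dnn.blobFromImage(image, 1.0, new Size(224, 224),
        new Scalar(104, 117, 123), false, true);   // scale, size, mean, swapRB, crop
net.setInput(blob);
Mat prob = net.forward();                    // single output blob of class scores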

@ -1,176 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.dnn;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
import org.opencv.utils.Converters;
// C++: class Layer
//javadoc: Layer
public class Layer extends Algorithm {
protected Layer(long addr) { super(addr); }
//
// C++: vector_Mat finalize(vector_Mat inputs)
//
//javadoc: Layer::finalize(inputs)
public List<Mat> finalize(List<Mat> inputs)
{
Mat inputs_mat = Converters.vector_Mat_to_Mat(inputs);
List<Mat> retVal = new ArrayList<Mat>();
Mat retValMat = new Mat(finalize_0(nativeObj, inputs_mat.nativeObj));
Converters.Mat_to_vector_Mat(retValMat, retVal);
return retVal;
}
//
// C++: void finalize(vector_Mat inputs, vector_Mat& outputs)
//
//javadoc: Layer::finalize(inputs, outputs)
public void finalize(List<Mat> inputs, List<Mat> outputs)
{
Mat inputs_mat = Converters.vector_Mat_to_Mat(inputs);
Mat outputs_mat = new Mat();
finalize_1(nativeObj, inputs_mat.nativeObj, outputs_mat.nativeObj);
Converters.Mat_to_vector_Mat(outputs_mat, outputs);
outputs_mat.release();
return;
}
//
// C++: void run(vector_Mat inputs, vector_Mat& outputs, vector_Mat& internals)
//
//javadoc: Layer::run(inputs, outputs, internals)
public void run(List<Mat> inputs, List<Mat> outputs, List<Mat> internals)
{
Mat inputs_mat = Converters.vector_Mat_to_Mat(inputs);
Mat outputs_mat = new Mat();
Mat internals_mat = Converters.vector_Mat_to_Mat(internals);
run_0(nativeObj, inputs_mat.nativeObj, outputs_mat.nativeObj, internals_mat.nativeObj);
Converters.Mat_to_vector_Mat(outputs_mat, outputs);
outputs_mat.release();
Converters.Mat_to_vector_Mat(internals_mat, internals);
internals_mat.release();
return;
}
//
// C++: vector_Mat Layer::blobs
//
//javadoc: Layer::get_blobs()
public List<Mat> get_blobs()
{
List<Mat> retVal = new ArrayList<Mat>();
Mat retValMat = new Mat(get_blobs_0(nativeObj));
Converters.Mat_to_vector_Mat(retValMat, retVal);
return retVal;
}
//
// C++: void Layer::blobs
//
//javadoc: Layer::set_blobs(blobs)
public void set_blobs(List<Mat> blobs)
{
Mat blobs_mat = Converters.vector_Mat_to_Mat(blobs);
set_blobs_0(nativeObj, blobs_mat.nativeObj);
return;
}
//
// C++: String Layer::name
//
//javadoc: Layer::get_name()
public String get_name()
{
String retVal = get_name_0(nativeObj);
return retVal;
}
//
// C++: String Layer::type
//
//javadoc: Layer::get_type()
public String get_type()
{
String retVal = get_type_0(nativeObj);
return retVal;
}
//
// C++: int Layer::preferableTarget
//
//javadoc: Layer::get_preferableTarget()
public int get_preferableTarget()
{
int retVal = get_preferableTarget_0(nativeObj);
return retVal;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: vector_Mat finalize(vector_Mat inputs)
private static native long finalize_0(long nativeObj, long inputs_mat_nativeObj);
// C++: void finalize(vector_Mat inputs, vector_Mat& outputs)
private static native void finalize_1(long nativeObj, long inputs_mat_nativeObj, long outputs_mat_nativeObj);
// C++: void run(vector_Mat inputs, vector_Mat& outputs, vector_Mat& internals)
private static native void run_0(long nativeObj, long inputs_mat_nativeObj, long outputs_mat_nativeObj, long internals_mat_nativeObj);
// C++: vector_Mat Layer::blobs
private static native long get_blobs_0(long nativeObj);
// C++: void Layer::blobs
private static native void set_blobs_0(long nativeObj, long blobs_mat_nativeObj);
// C++: String Layer::name
private static native String get_name_0(long nativeObj);
// C++: String Layer::type
private static native String get_type_0(long nativeObj);
// C++: int Layer::preferableTarget
private static native int get_preferableTarget_0(long nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}

@ -1,601 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.dnn;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDouble;
import org.opencv.core.MatOfInt;
import org.opencv.dnn.DictValue;
import org.opencv.utils.Converters;
// C++: class Net
//javadoc: Net
public class Net {
protected final long nativeObj;
protected Net(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
//
// C++: Net()
//
//javadoc: Net::Net()
public Net()
{
nativeObj = Net_0();
return;
}
//
// C++: Mat forward(String outputName = String())
//
//javadoc: Net::forward(outputName)
public Mat forward(String outputName)
{
Mat retVal = new Mat(forward_0(nativeObj, outputName));
return retVal;
}
//javadoc: Net::forward()
public Mat forward()
{
Mat retVal = new Mat(forward_1(nativeObj));
return retVal;
}
//
// C++: Mat getParam(LayerId layer, int numParam = 0)
//
//javadoc: Net::getParam(layer, numParam)
public Mat getParam(DictValue layer, int numParam)
{
Mat retVal = new Mat(getParam_0(nativeObj, layer.getNativeObjAddr(), numParam));
return retVal;
}
//javadoc: Net::getParam(layer)
public Mat getParam(DictValue layer)
{
Mat retVal = new Mat(getParam_1(nativeObj, layer.getNativeObjAddr()));
return retVal;
}
//
// C++: Ptr_Layer getLayer(LayerId layerId)
//
//javadoc: Net::getLayer(layerId)
public Layer getLayer(DictValue layerId)
{
Layer retVal = new Layer(getLayer_0(nativeObj, layerId.getNativeObjAddr()));
return retVal;
}
//
// C++: bool empty()
//
//javadoc: Net::empty()
public boolean empty()
{
boolean retVal = empty_0(nativeObj);
return retVal;
}
//
// C++: int getLayerId(String layer)
//
//javadoc: Net::getLayerId(layer)
public int getLayerId(String layer)
{
int retVal = getLayerId_0(nativeObj, layer);
return retVal;
}
//
// C++: int getLayersCount(String layerType)
//
//javadoc: Net::getLayersCount(layerType)
public int getLayersCount(String layerType)
{
int retVal = getLayersCount_0(nativeObj, layerType);
return retVal;
}
//
// C++: int64 getFLOPS(MatShape netInputShape)
//
//javadoc: Net::getFLOPS(netInputShape)
public long getFLOPS(MatOfInt netInputShape)
{
Mat netInputShape_mat = netInputShape;
long retVal = getFLOPS_0(nativeObj, netInputShape_mat.nativeObj);
return retVal;
}
//
// C++: int64 getFLOPS(int layerId, MatShape netInputShape)
//
//javadoc: Net::getFLOPS(layerId, netInputShape)
public long getFLOPS(int layerId, MatOfInt netInputShape)
{
Mat netInputShape_mat = netInputShape;
long retVal = getFLOPS_1(nativeObj, layerId, netInputShape_mat.nativeObj);
return retVal;
}
//
// C++: int64 getFLOPS(int layerId, vector_MatShape netInputShapes)
//
//javadoc: Net::getFLOPS(layerId, netInputShapes)
public long getFLOPS(int layerId, List<MatOfInt> netInputShapes)
{
long retVal = getFLOPS_2(nativeObj, layerId, netInputShapes);
return retVal;
}
//
// C++: int64 getFLOPS(vector_MatShape netInputShapes)
//
//javadoc: Net::getFLOPS(netInputShapes)
public long getFLOPS(List<MatOfInt> netInputShapes)
{
long retVal = getFLOPS_3(nativeObj, netInputShapes);
return retVal;
}
//
// C++: int64 getPerfProfile(vector_double& timings)
//
//javadoc: Net::getPerfProfile(timings)
public long getPerfProfile(MatOfDouble timings)
{
Mat timings_mat = timings;
long retVal = getPerfProfile_0(nativeObj, timings_mat.nativeObj);
return retVal;
}
//
// C++: vector_String getLayerNames()
//
//javadoc: Net::getLayerNames()
public List<String> getLayerNames()
{
List<String> retVal = getLayerNames_0(nativeObj);
return retVal;
}
//
// C++: vector_int getUnconnectedOutLayers()
//
//javadoc: Net::getUnconnectedOutLayers()
public MatOfInt getUnconnectedOutLayers()
{
MatOfInt retVal = MatOfInt.fromNativeAddr(getUnconnectedOutLayers_0(nativeObj));
return retVal;
}
//
// C++: void connect(String outPin, String inpPin)
//
//javadoc: Net::connect(outPin, inpPin)
public void connect(String outPin, String inpPin)
{
connect_0(nativeObj, outPin, inpPin);
return;
}
//
// C++: void deleteLayer(LayerId layer)
//
//javadoc: Net::deleteLayer(layer)
public void deleteLayer(DictValue layer)
{
deleteLayer_0(nativeObj, layer.getNativeObjAddr());
return;
}
//
// C++: void enableFusion(bool fusion)
//
//javadoc: Net::enableFusion(fusion)
public void enableFusion(boolean fusion)
{
enableFusion_0(nativeObj, fusion);
return;
}
//
// C++: void forward(vector_Mat& outputBlobs, String outputName = String())
//
//javadoc: Net::forward(outputBlobs, outputName)
public void forward(List<Mat> outputBlobs, String outputName)
{
Mat outputBlobs_mat = new Mat();
forward_2(nativeObj, outputBlobs_mat.nativeObj, outputName);
Converters.Mat_to_vector_Mat(outputBlobs_mat, outputBlobs);
outputBlobs_mat.release();
return;
}
//javadoc: Net::forward(outputBlobs)
public void forward(List<Mat> outputBlobs)
{
Mat outputBlobs_mat = new Mat();
forward_3(nativeObj, outputBlobs_mat.nativeObj);
Converters.Mat_to_vector_Mat(outputBlobs_mat, outputBlobs);
outputBlobs_mat.release();
return;
}
//
// C++: void forward(vector_Mat& outputBlobs, vector_String outBlobNames)
//
//javadoc: Net::forward(outputBlobs, outBlobNames)
public void forward(List<Mat> outputBlobs, List<String> outBlobNames)
{
Mat outputBlobs_mat = new Mat();
forward_4(nativeObj, outputBlobs_mat.nativeObj, outBlobNames);
Converters.Mat_to_vector_Mat(outputBlobs_mat, outputBlobs);
outputBlobs_mat.release();
return;
}
//
// C++: void forward(vector_vector_Mat& outputBlobs, vector_String outBlobNames)
//
// Unknown type 'vector_vector_Mat' (O), skipping the function
//
// C++: void getLayerTypes(vector_String& layersTypes)
//
//javadoc: Net::getLayerTypes(layersTypes)
public void getLayerTypes(List<String> layersTypes)
{
getLayerTypes_0(nativeObj, layersTypes);
return;
}
//
// C++: void getLayersShapes(MatShape netInputShape, vector_int& layersIds, vector_vector_MatShape& inLayersShapes, vector_vector_MatShape& outLayersShapes)
//
// Unknown type 'vector_vector_MatShape' (O), skipping the function
//
// C++: void getLayersShapes(vector_MatShape netInputShapes, vector_int& layersIds, vector_vector_MatShape& inLayersShapes, vector_vector_MatShape& outLayersShapes)
//
// Unknown type 'vector_vector_MatShape' (O), skipping the function
//
// C++: void getMemoryConsumption(MatShape netInputShape, size_t& weights, size_t& blobs)
//
//javadoc: Net::getMemoryConsumption(netInputShape, weights, blobs)
public void getMemoryConsumption(MatOfInt netInputShape, long[] weights, long[] blobs)
{
Mat netInputShape_mat = netInputShape;
double[] weights_out = new double[1];
double[] blobs_out = new double[1];
getMemoryConsumption_0(nativeObj, netInputShape_mat.nativeObj, weights_out, blobs_out);
if(weights!=null) weights[0] = (long)weights_out[0];
if(blobs!=null) blobs[0] = (long)blobs_out[0];
return;
}
//
// C++: void getMemoryConsumption(int layerId, MatShape netInputShape, size_t& weights, size_t& blobs)
//
//javadoc: Net::getMemoryConsumption(layerId, netInputShape, weights, blobs)
public void getMemoryConsumption(int layerId, MatOfInt netInputShape, long[] weights, long[] blobs)
{
Mat netInputShape_mat = netInputShape;
double[] weights_out = new double[1];
double[] blobs_out = new double[1];
getMemoryConsumption_1(nativeObj, layerId, netInputShape_mat.nativeObj, weights_out, blobs_out);
if(weights!=null) weights[0] = (long)weights_out[0];
if(blobs!=null) blobs[0] = (long)blobs_out[0];
return;
}
//
// C++: void getMemoryConsumption(int layerId, vector_MatShape netInputShapes, size_t& weights, size_t& blobs)
//
//javadoc: Net::getMemoryConsumption(layerId, netInputShapes, weights, blobs)
public void getMemoryConsumption(int layerId, List<MatOfInt> netInputShapes, long[] weights, long[] blobs)
{
double[] weights_out = new double[1];
double[] blobs_out = new double[1];
getMemoryConsumption_2(nativeObj, layerId, netInputShapes, weights_out, blobs_out);
if(weights!=null) weights[0] = (long)weights_out[0];
if(blobs!=null) blobs[0] = (long)blobs_out[0];
return;
}
//
// C++: void setHalideScheduler(String scheduler)
//
//javadoc: Net::setHalideScheduler(scheduler)
public void setHalideScheduler(String scheduler)
{
setHalideScheduler_0(nativeObj, scheduler);
return;
}
//
// C++: void setInput(Mat blob, String name = "")
//
//javadoc: Net::setInput(blob, name)
public void setInput(Mat blob, String name)
{
setInput_0(nativeObj, blob.nativeObj, name);
return;
}
//javadoc: Net::setInput(blob)
public void setInput(Mat blob)
{
setInput_1(nativeObj, blob.nativeObj);
return;
}
//
// C++: void setInputsNames(vector_String inputBlobNames)
//
//javadoc: Net::setInputsNames(inputBlobNames)
public void setInputsNames(List<String> inputBlobNames)
{
setInputsNames_0(nativeObj, inputBlobNames);
return;
}
//
// C++: void setParam(LayerId layer, int numParam, Mat blob)
//
//javadoc: Net::setParam(layer, numParam, blob)
public void setParam(DictValue layer, int numParam, Mat blob)
{
setParam_0(nativeObj, layer.getNativeObjAddr(), numParam, blob.nativeObj);
return;
}
//
// C++: void setPreferableBackend(int backendId)
//
//javadoc: Net::setPreferableBackend(backendId)
public void setPreferableBackend(int backendId)
{
setPreferableBackend_0(nativeObj, backendId);
return;
}
//
// C++: void setPreferableTarget(int targetId)
//
//javadoc: Net::setPreferableTarget(targetId)
public void setPreferableTarget(int targetId)
{
setPreferableTarget_0(nativeObj, targetId);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: Net()
private static native long Net_0();
// C++: Mat forward(String outputName = String())
private static native long forward_0(long nativeObj, String outputName);
private static native long forward_1(long nativeObj);
// C++: Mat getParam(LayerId layer, int numParam = 0)
private static native long getParam_0(long nativeObj, long layer_nativeObj, int numParam);
private static native long getParam_1(long nativeObj, long layer_nativeObj);
// C++: Ptr_Layer getLayer(LayerId layerId)
private static native long getLayer_0(long nativeObj, long layerId_nativeObj);
// C++: bool empty()
private static native boolean empty_0(long nativeObj);
// C++: int getLayerId(String layer)
private static native int getLayerId_0(long nativeObj, String layer);
// C++: int getLayersCount(String layerType)
private static native int getLayersCount_0(long nativeObj, String layerType);
// C++: int64 getFLOPS(MatShape netInputShape)
private static native long getFLOPS_0(long nativeObj, long netInputShape_mat_nativeObj);
// C++: int64 getFLOPS(int layerId, MatShape netInputShape)
private static native long getFLOPS_1(long nativeObj, int layerId, long netInputShape_mat_nativeObj);
// C++: int64 getFLOPS(int layerId, vector_MatShape netInputShapes)
private static native long getFLOPS_2(long nativeObj, int layerId, List<MatOfInt> netInputShapes);
// C++: int64 getFLOPS(vector_MatShape netInputShapes)
private static native long getFLOPS_3(long nativeObj, List<MatOfInt> netInputShapes);
// C++: int64 getPerfProfile(vector_double& timings)
private static native long getPerfProfile_0(long nativeObj, long timings_mat_nativeObj);
// C++: vector_String getLayerNames()
private static native List<String> getLayerNames_0(long nativeObj);
// C++: vector_int getUnconnectedOutLayers()
private static native long getUnconnectedOutLayers_0(long nativeObj);
// C++: void connect(String outPin, String inpPin)
private static native void connect_0(long nativeObj, String outPin, String inpPin);
// C++: void deleteLayer(LayerId layer)
private static native void deleteLayer_0(long nativeObj, long layer_nativeObj);
// C++: void enableFusion(bool fusion)
private static native void enableFusion_0(long nativeObj, boolean fusion);
// C++: void forward(vector_Mat& outputBlobs, String outputName = String())
private static native void forward_2(long nativeObj, long outputBlobs_mat_nativeObj, String outputName);
private static native void forward_3(long nativeObj, long outputBlobs_mat_nativeObj);
// C++: void forward(vector_Mat& outputBlobs, vector_String outBlobNames)
private static native void forward_4(long nativeObj, long outputBlobs_mat_nativeObj, List<String> outBlobNames);
// C++: void getLayerTypes(vector_String& layersTypes)
private static native void getLayerTypes_0(long nativeObj, List<String> layersTypes);
// C++: void getMemoryConsumption(MatShape netInputShape, size_t& weights, size_t& blobs)
private static native void getMemoryConsumption_0(long nativeObj, long netInputShape_mat_nativeObj, double[] weights_out, double[] blobs_out);
// C++: void getMemoryConsumption(int layerId, MatShape netInputShape, size_t& weights, size_t& blobs)
private static native void getMemoryConsumption_1(long nativeObj, int layerId, long netInputShape_mat_nativeObj, double[] weights_out, double[] blobs_out);
// C++: void getMemoryConsumption(int layerId, vector_MatShape netInputShapes, size_t& weights, size_t& blobs)
private static native void getMemoryConsumption_2(long nativeObj, int layerId, List<MatOfInt> netInputShapes, double[] weights_out, double[] blobs_out);
// C++: void setHalideScheduler(String scheduler)
private static native void setHalideScheduler_0(long nativeObj, String scheduler);
// C++: void setInput(Mat blob, String name = "")
private static native void setInput_0(long nativeObj, long blob_nativeObj, String name);
private static native void setInput_1(long nativeObj, long blob_nativeObj);
// C++: void setInputsNames(vector_String inputBlobNames)
private static native void setInputsNames_0(long nativeObj, List<String> inputBlobNames);
// C++: void setParam(LayerId layer, int numParam, Mat blob)
private static native void setParam_0(long nativeObj, long layer_nativeObj, int numParam, long blob_nativeObj);
// C++: void setPreferableBackend(int backendId)
private static native void setPreferableBackend_0(long nativeObj, int backendId);
// C++: void setPreferableTarget(int targetId)
private static native void setPreferableTarget_0(long nativeObj, int targetId);
// native support for java finalize()
private static native void delete(long nativeObj);
}
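A sketch of how the unconnected output layers of the removed Net class were typically resolved to names and fetched in one forward pass (getLayerNames, getUnconnectedOutLayers and forward(List, List) are all defined above; the 1-based layer ids are the behaviour of this OpenCV API):

List<String> names = net.getLayerNames();
MatOfInt outIds = net.getUnconnectedOutLayers();
List<String> outNames = new ArrayList<>();
for (int id : outIds.toArray())
    outNames.add(names.get(id - 1));     // ids returned here are 1-based
List<Mat> outputs = new ArrayList<>();
net.forward(outputs, outNames);          // one blob per requested output layer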

@ -1,316 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
// C++: class AKAZE
//javadoc: AKAZE
public class AKAZE extends Feature2D {
protected AKAZE(long addr) { super(addr); }
public static final int
DESCRIPTOR_KAZE_UPRIGHT = 2,
DESCRIPTOR_KAZE = 3,
DESCRIPTOR_MLDB_UPRIGHT = 4,
DESCRIPTOR_MLDB = 5;
//
// C++: static Ptr_AKAZE create(int descriptor_type = AKAZE::DESCRIPTOR_MLDB, int descriptor_size = 0, int descriptor_channels = 3, float threshold = 0.001f, int nOctaves = 4, int nOctaveLayers = 4, int diffusivity = KAZE::DIFF_PM_G2)
//
//javadoc: AKAZE::create(descriptor_type, descriptor_size, descriptor_channels, threshold, nOctaves, nOctaveLayers, diffusivity)
public static AKAZE create(int descriptor_type, int descriptor_size, int descriptor_channels, float threshold, int nOctaves, int nOctaveLayers, int diffusivity)
{
AKAZE retVal = new AKAZE(create_0(descriptor_type, descriptor_size, descriptor_channels, threshold, nOctaves, nOctaveLayers, diffusivity));
return retVal;
}
//javadoc: AKAZE::create()
public static AKAZE create()
{
AKAZE retVal = new AKAZE(create_1());
return retVal;
}
//
// C++: String getDefaultName()
//
//javadoc: AKAZE::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
//
// C++: double getThreshold()
//
//javadoc: AKAZE::getThreshold()
public double getThreshold()
{
double retVal = getThreshold_0(nativeObj);
return retVal;
}
//
// C++: int getDescriptorChannels()
//
//javadoc: AKAZE::getDescriptorChannels()
public int getDescriptorChannels()
{
int retVal = getDescriptorChannels_0(nativeObj);
return retVal;
}
//
// C++: int getDescriptorSize()
//
//javadoc: AKAZE::getDescriptorSize()
public int getDescriptorSize()
{
int retVal = getDescriptorSize_0(nativeObj);
return retVal;
}
//
// C++: int getDescriptorType()
//
//javadoc: AKAZE::getDescriptorType()
public int getDescriptorType()
{
int retVal = getDescriptorType_0(nativeObj);
return retVal;
}
//
// C++: int getDiffusivity()
//
//javadoc: AKAZE::getDiffusivity()
public int getDiffusivity()
{
int retVal = getDiffusivity_0(nativeObj);
return retVal;
}
//
// C++: int getNOctaveLayers()
//
//javadoc: AKAZE::getNOctaveLayers()
public int getNOctaveLayers()
{
int retVal = getNOctaveLayers_0(nativeObj);
return retVal;
}
//
// C++: int getNOctaves()
//
//javadoc: AKAZE::getNOctaves()
public int getNOctaves()
{
int retVal = getNOctaves_0(nativeObj);
return retVal;
}
//
// C++: void setDescriptorChannels(int dch)
//
//javadoc: AKAZE::setDescriptorChannels(dch)
public void setDescriptorChannels(int dch)
{
setDescriptorChannels_0(nativeObj, dch);
return;
}
//
// C++: void setDescriptorSize(int dsize)
//
//javadoc: AKAZE::setDescriptorSize(dsize)
public void setDescriptorSize(int dsize)
{
setDescriptorSize_0(nativeObj, dsize);
return;
}
//
// C++: void setDescriptorType(int dtype)
//
//javadoc: AKAZE::setDescriptorType(dtype)
public void setDescriptorType(int dtype)
{
setDescriptorType_0(nativeObj, dtype);
return;
}
//
// C++: void setDiffusivity(int diff)
//
//javadoc: AKAZE::setDiffusivity(diff)
public void setDiffusivity(int diff)
{
setDiffusivity_0(nativeObj, diff);
return;
}
//
// C++: void setNOctaveLayers(int octaveLayers)
//
//javadoc: AKAZE::setNOctaveLayers(octaveLayers)
public void setNOctaveLayers(int octaveLayers)
{
setNOctaveLayers_0(nativeObj, octaveLayers);
return;
}
//
// C++: void setNOctaves(int octaves)
//
//javadoc: AKAZE::setNOctaves(octaves)
public void setNOctaves(int octaves)
{
setNOctaves_0(nativeObj, octaves);
return;
}
//
// C++: void setThreshold(double threshold)
//
//javadoc: AKAZE::setThreshold(threshold)
public void setThreshold(double threshold)
{
setThreshold_0(nativeObj, threshold);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_AKAZE create(int descriptor_type = AKAZE::DESCRIPTOR_MLDB, int descriptor_size = 0, int descriptor_channels = 3, float threshold = 0.001f, int nOctaves = 4, int nOctaveLayers = 4, int diffusivity = KAZE::DIFF_PM_G2)
private static native long create_0(int descriptor_type, int descriptor_size, int descriptor_channels, float threshold, int nOctaves, int nOctaveLayers, int diffusivity);
private static native long create_1();
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// C++: double getThreshold()
private static native double getThreshold_0(long nativeObj);
// C++: int getDescriptorChannels()
private static native int getDescriptorChannels_0(long nativeObj);
// C++: int getDescriptorSize()
private static native int getDescriptorSize_0(long nativeObj);
// C++: int getDescriptorType()
private static native int getDescriptorType_0(long nativeObj);
// C++: int getDiffusivity()
private static native int getDiffusivity_0(long nativeObj);
// C++: int getNOctaveLayers()
private static native int getNOctaveLayers_0(long nativeObj);
// C++: int getNOctaves()
private static native int getNOctaves_0(long nativeObj);
// C++: void setDescriptorChannels(int dch)
private static native void setDescriptorChannels_0(long nativeObj, int dch);
// C++: void setDescriptorSize(int dsize)
private static native void setDescriptorSize_0(long nativeObj, int dsize);
// C++: void setDescriptorType(int dtype)
private static native void setDescriptorType_0(long nativeObj, int dtype);
// C++: void setDiffusivity(int diff)
private static native void setDiffusivity_0(long nativeObj, int diff);
// C++: void setNOctaveLayers(int octaveLayers)
private static native void setNOctaveLayers_0(long nativeObj, int octaveLayers);
// C++: void setNOctaves(int octaves)
private static native void setNOctaves_0(long nativeObj, int octaves);
// C++: void setThreshold(double threshold)
private static native void setThreshold_0(long nativeObj, double threshold);
// native support for java finalize()
private static native void delete(long nativeObj);
}
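A configuration sketch for the removed AKAZE wrapper using the factory and setters above; keypoint detection itself goes through the inherited Feature2D interface, which is outside this hunk:

AKAZE akaze = AKAZE.create();          // MLDB descriptors by default
akaze.setThreshold(0.0008);            // lower threshold -> more keypoints
akaze.setNOctaves(4);
akaze.setNOctaveLayers(4);
int dsize = akaze.getDescriptorSize(); // 0 means "use the full descriptor size"
// detection: akaze.detect(image, keypoints) via Feature2D (not shown in this diff)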

@ -1,182 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
// C++: class AgastFeatureDetector
//javadoc: AgastFeatureDetector
public class AgastFeatureDetector extends Feature2D {
protected AgastFeatureDetector(long addr) { super(addr); }
public static final int
AGAST_5_8 = 0,
AGAST_7_12d = 1,
AGAST_7_12s = 2,
OAST_9_16 = 3,
THRESHOLD = 10000,
NONMAX_SUPPRESSION = 10001;
//
// C++: static Ptr_AgastFeatureDetector create(int threshold = 10, bool nonmaxSuppression = true, int type = AgastFeatureDetector::OAST_9_16)
//
//javadoc: AgastFeatureDetector::create(threshold, nonmaxSuppression, type)
public static AgastFeatureDetector create(int threshold, boolean nonmaxSuppression, int type)
{
AgastFeatureDetector retVal = new AgastFeatureDetector(create_0(threshold, nonmaxSuppression, type));
return retVal;
}
//javadoc: AgastFeatureDetector::create()
public static AgastFeatureDetector create()
{
AgastFeatureDetector retVal = new AgastFeatureDetector(create_1());
return retVal;
}
//
// C++: String getDefaultName()
//
//javadoc: AgastFeatureDetector::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
//
// C++: bool getNonmaxSuppression()
//
//javadoc: AgastFeatureDetector::getNonmaxSuppression()
public boolean getNonmaxSuppression()
{
boolean retVal = getNonmaxSuppression_0(nativeObj);
return retVal;
}
//
// C++: int getThreshold()
//
//javadoc: AgastFeatureDetector::getThreshold()
public int getThreshold()
{
int retVal = getThreshold_0(nativeObj);
return retVal;
}
//
// C++: int getType()
//
//javadoc: AgastFeatureDetector::getType()
public int getType()
{
int retVal = getType_0(nativeObj);
return retVal;
}
//
// C++: void setNonmaxSuppression(bool f)
//
//javadoc: AgastFeatureDetector::setNonmaxSuppression(f)
public void setNonmaxSuppression(boolean f)
{
setNonmaxSuppression_0(nativeObj, f);
return;
}
//
// C++: void setThreshold(int threshold)
//
//javadoc: AgastFeatureDetector::setThreshold(threshold)
public void setThreshold(int threshold)
{
setThreshold_0(nativeObj, threshold);
return;
}
//
// C++: void setType(int type)
//
//javadoc: AgastFeatureDetector::setType(type)
public void setType(int type)
{
setType_0(nativeObj, type);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_AgastFeatureDetector create(int threshold = 10, bool nonmaxSuppression = true, int type = AgastFeatureDetector::OAST_9_16)
private static native long create_0(int threshold, boolean nonmaxSuppression, int type);
private static native long create_1();
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// C++: bool getNonmaxSuppression()
private static native boolean getNonmaxSuppression_0(long nativeObj);
// C++: int getThreshold()
private static native int getThreshold_0(long nativeObj);
// C++: int getType()
private static native int getType_0(long nativeObj);
// C++: void setNonmaxSuppression(bool f)
private static native void setNonmaxSuppression_0(long nativeObj, boolean f);
// C++: void setThreshold(int threshold)
private static native void setThreshold_0(long nativeObj, int threshold);
// C++: void setType(int type)
private static native void setType_0(long nativeObj, int type);
// native support for java finalize()
private static native void delete(long nativeObj);
}
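A minimal sketch of the AgastFeatureDetector class removed above (not part of this commit); paths and library loading are assumptions:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.AgastFeatureDetector;
import org.opencv.imgcodecs.Imgcodecs;

public class AgastSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME); // assumes native libs are installed
        Mat image = Imgcodecs.imread("input.jpg", Imgcodecs.IMREAD_GRAYSCALE); // hypothetical path
        // threshold 10, non-max suppression on, OAST_9_16 pattern (the documented defaults)
        AgastFeatureDetector agast = AgastFeatureDetector.create(10, true, AgastFeatureDetector.OAST_9_16);
        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        agast.detect(image, keypoints); // detect is inherited from Feature2D
        System.out.println("AGAST keypoints: " + keypoints.rows());
    }
}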

@@ -1,81 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
// C++: class BFMatcher
//javadoc: BFMatcher
public class BFMatcher extends DescriptorMatcher {
protected BFMatcher(long addr) { super(addr); }
//
// C++: BFMatcher(int normType = NORM_L2, bool crossCheck = false)
//
//javadoc: BFMatcher::BFMatcher(normType, crossCheck)
public BFMatcher(int normType, boolean crossCheck)
{
super( BFMatcher_0(normType, crossCheck) );
return;
}
//javadoc: BFMatcher::BFMatcher()
public BFMatcher()
{
super( BFMatcher_1() );
return;
}
//
// C++: static Ptr_BFMatcher create(int normType = NORM_L2, bool crossCheck = false)
//
//javadoc: BFMatcher::create(normType, crossCheck)
public static BFMatcher create(int normType, boolean crossCheck)
{
BFMatcher retVal = new BFMatcher(create_0(normType, crossCheck));
return retVal;
}
//javadoc: BFMatcher::create()
public static BFMatcher create()
{
BFMatcher retVal = new BFMatcher(create_1());
return retVal;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: BFMatcher(int normType = NORM_L2, bool crossCheck = false)
private static native long BFMatcher_0(int normType, boolean crossCheck);
private static native long BFMatcher_1();
// C++: static Ptr_BFMatcher create(int normType = NORM_L2, bool crossCheck = false)
private static native long create_0(int normType, boolean crossCheck);
private static native long create_1();
// native support for java finalize()
private static native void delete(long nativeObj);
}
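For reference, a brute-force matching sketch using the BFMatcher class removed above together with ORB descriptors (illustration only; image paths are assumptions):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.BFMatcher;
import org.opencv.features2d.ORB;
import org.opencv.imgcodecs.Imgcodecs;

public class BfMatcherSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat img1 = Imgcodecs.imread("a.jpg", Imgcodecs.IMREAD_GRAYSCALE); // hypothetical paths
        Mat img2 = Imgcodecs.imread("b.jpg", Imgcodecs.IMREAD_GRAYSCALE);
        ORB orb = ORB.create();
        MatOfKeyPoint kp1 = new MatOfKeyPoint(), kp2 = new MatOfKeyPoint();
        Mat desc1 = new Mat(), desc2 = new Mat();
        orb.detectAndCompute(img1, new Mat(), kp1, desc1);
        orb.detectAndCompute(img2, new Mat(), kp2, desc2);
        // Hamming norm matches ORB's binary descriptors; crossCheck keeps only mutual best matches.
        BFMatcher matcher = BFMatcher.create(Core.NORM_HAMMING, true);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(desc1, desc2, matches);
        System.out.println("Matches: " + matches.rows());
    }
}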

@@ -1,125 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.utils.Converters;
// C++: class BOWImgDescriptorExtractor
//javadoc: BOWImgDescriptorExtractor
public class BOWImgDescriptorExtractor {
protected final long nativeObj;
protected BOWImgDescriptorExtractor(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
//
// C++: BOWImgDescriptorExtractor(Ptr_DescriptorExtractor dextractor, Ptr_DescriptorMatcher dmatcher)
//
// Unknown type 'Ptr_DescriptorExtractor' (I), skipping the function
//
// C++: Mat getVocabulary()
//
//javadoc: BOWImgDescriptorExtractor::getVocabulary()
public Mat getVocabulary()
{
Mat retVal = new Mat(getVocabulary_0(nativeObj));
return retVal;
}
//
// C++: int descriptorSize()
//
//javadoc: BOWImgDescriptorExtractor::descriptorSize()
public int descriptorSize()
{
int retVal = descriptorSize_0(nativeObj);
return retVal;
}
//
// C++: int descriptorType()
//
//javadoc: BOWImgDescriptorExtractor::descriptorType()
public int descriptorType()
{
int retVal = descriptorType_0(nativeObj);
return retVal;
}
//
// C++: void compute2(Mat image, vector_KeyPoint keypoints, Mat& imgDescriptor)
//
//javadoc: BOWImgDescriptorExtractor::compute2(image, keypoints, imgDescriptor)
public void compute(Mat image, MatOfKeyPoint keypoints, Mat imgDescriptor)
{
Mat keypoints_mat = keypoints;
compute_0(nativeObj, image.nativeObj, keypoints_mat.nativeObj, imgDescriptor.nativeObj);
return;
}
//
// C++: void setVocabulary(Mat vocabulary)
//
//javadoc: BOWImgDescriptorExtractor::setVocabulary(vocabulary)
public void setVocabulary(Mat vocabulary)
{
setVocabulary_0(nativeObj, vocabulary.nativeObj);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: Mat getVocabulary()
private static native long getVocabulary_0(long nativeObj);
// C++: int descriptorSize()
private static native int descriptorSize_0(long nativeObj);
// C++: int descriptorType()
private static native int descriptorType_0(long nativeObj);
// C++: void compute2(Mat image, vector_KeyPoint keypoints, Mat& imgDescriptor)
private static native void compute_0(long nativeObj, long image_nativeObj, long keypoints_mat_nativeObj, long imgDescriptor_nativeObj);
// C++: void setVocabulary(Mat vocabulary)
private static native void setVocabulary_0(long nativeObj, long vocabulary_nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}

@@ -1,89 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import org.opencv.core.Mat;
import org.opencv.core.TermCriteria;
// C++: class BOWKMeansTrainer
//javadoc: BOWKMeansTrainer
public class BOWKMeansTrainer extends BOWTrainer {
protected BOWKMeansTrainer(long addr) { super(addr); }
//
// C++: BOWKMeansTrainer(int clusterCount, TermCriteria termcrit = TermCriteria(), int attempts = 3, int flags = KMEANS_PP_CENTERS)
//
//javadoc: BOWKMeansTrainer::BOWKMeansTrainer(clusterCount, termcrit, attempts, flags)
public BOWKMeansTrainer(int clusterCount, TermCriteria termcrit, int attempts, int flags)
{
super( BOWKMeansTrainer_0(clusterCount, termcrit.type, termcrit.maxCount, termcrit.epsilon, attempts, flags) );
return;
}
//javadoc: BOWKMeansTrainer::BOWKMeansTrainer(clusterCount)
public BOWKMeansTrainer(int clusterCount)
{
super( BOWKMeansTrainer_1(clusterCount) );
return;
}
//
// C++: Mat cluster(Mat descriptors)
//
//javadoc: BOWKMeansTrainer::cluster(descriptors)
public Mat cluster(Mat descriptors)
{
Mat retVal = new Mat(cluster_0(nativeObj, descriptors.nativeObj));
return retVal;
}
//
// C++: Mat cluster()
//
//javadoc: BOWKMeansTrainer::cluster()
public Mat cluster()
{
Mat retVal = new Mat(cluster_1(nativeObj));
return retVal;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: BOWKMeansTrainer(int clusterCount, TermCriteria termcrit = TermCriteria(), int attempts = 3, int flags = KMEANS_PP_CENTERS)
private static native long BOWKMeansTrainer_0(int clusterCount, int termcrit_type, int termcrit_maxCount, double termcrit_epsilon, int attempts, int flags);
private static native long BOWKMeansTrainer_1(int clusterCount);
// C++: Mat cluster(Mat descriptors)
private static native long cluster_0(long nativeObj, long descriptors_nativeObj);
// C++: Mat cluster()
private static native long cluster_1(long nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}
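A sketch of the vocabulary-training step using the BOWKMeansTrainer class removed above (not part of this commit). KAZE is used here because k-means clustering requires floating-point descriptors; note the BOWImgDescriptorExtractor constructor is skipped in this Java binding, so only the clustering side is shown. Paths are assumptions:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.BOWKMeansTrainer;
import org.opencv.features2d.KAZE;
import org.opencv.imgcodecs.Imgcodecs;

public class BowVocabularySketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        // Cluster descriptors from a few training images into a 50-word visual vocabulary.
        BOWKMeansTrainer trainer = new BOWKMeansTrainer(50);
        KAZE kaze = KAZE.create(); // produces CV_32F descriptors, as k-means expects
        for (String path : new String[]{"train1.jpg", "train2.jpg"}) { // hypothetical paths
            Mat img = Imgcodecs.imread(path, Imgcodecs.IMREAD_GRAYSCALE);
            MatOfKeyPoint kp = new MatOfKeyPoint();
            Mat desc = new Mat();
            kaze.detectAndCompute(img, new Mat(), kp, desc);
            trainer.add(desc);
        }
        Mat vocabulary = trainer.cluster(); // rows = cluster count, cols = descriptor length
        System.out.println("Vocabulary: " + vocabulary.rows() + " x " + vocabulary.cols());
    }
}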

@@ -1,134 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.utils.Converters;
// C++: class BOWTrainer
//javadoc: BOWTrainer
public class BOWTrainer {
protected final long nativeObj;
protected BOWTrainer(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
//
// C++: Mat cluster(Mat descriptors)
//
//javadoc: BOWTrainer::cluster(descriptors)
public Mat cluster(Mat descriptors)
{
Mat retVal = new Mat(cluster_0(nativeObj, descriptors.nativeObj));
return retVal;
}
//
// C++: Mat cluster()
//
//javadoc: BOWTrainer::cluster()
public Mat cluster()
{
Mat retVal = new Mat(cluster_1(nativeObj));
return retVal;
}
//
// C++: int descriptorsCount()
//
//javadoc: BOWTrainer::descriptorsCount()
public int descriptorsCount()
{
int retVal = descriptorsCount_0(nativeObj);
return retVal;
}
//
// C++: vector_Mat getDescriptors()
//
//javadoc: BOWTrainer::getDescriptors()
public List<Mat> getDescriptors()
{
List<Mat> retVal = new ArrayList<Mat>();
Mat retValMat = new Mat(getDescriptors_0(nativeObj));
Converters.Mat_to_vector_Mat(retValMat, retVal);
return retVal;
}
//
// C++: void add(Mat descriptors)
//
//javadoc: BOWTrainer::add(descriptors)
public void add(Mat descriptors)
{
add_0(nativeObj, descriptors.nativeObj);
return;
}
//
// C++: void clear()
//
//javadoc: BOWTrainer::clear()
public void clear()
{
clear_0(nativeObj);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: Mat cluster(Mat descriptors)
private static native long cluster_0(long nativeObj, long descriptors_nativeObj);
// C++: Mat cluster()
private static native long cluster_1(long nativeObj);
// C++: int descriptorsCount()
private static native int descriptorsCount_0(long nativeObj);
// C++: vector_Mat getDescriptors()
private static native long getDescriptors_0(long nativeObj);
// C++: void add(Mat descriptors)
private static native void add_0(long nativeObj, long descriptors_nativeObj);
// C++: void clear()
private static native void clear_0(long nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}

@@ -1,137 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.utils.Converters;
// C++: class BRISK
//javadoc: BRISK
public class BRISK extends Feature2D {
protected BRISK(long addr) { super(addr); }
//
// C++: static Ptr_BRISK create(int thresh, int octaves, vector_float radiusList, vector_int numberList, float dMax = 5.85f, float dMin = 8.2f, vector_int indexChange = std::vector<int>())
//
//javadoc: BRISK::create(thresh, octaves, radiusList, numberList, dMax, dMin, indexChange)
public static BRISK create(int thresh, int octaves, MatOfFloat radiusList, MatOfInt numberList, float dMax, float dMin, MatOfInt indexChange)
{
Mat radiusList_mat = radiusList;
Mat numberList_mat = numberList;
Mat indexChange_mat = indexChange;
BRISK retVal = new BRISK(create_0(thresh, octaves, radiusList_mat.nativeObj, numberList_mat.nativeObj, dMax, dMin, indexChange_mat.nativeObj));
return retVal;
}
//javadoc: BRISK::create(thresh, octaves, radiusList, numberList)
public static BRISK create(int thresh, int octaves, MatOfFloat radiusList, MatOfInt numberList)
{
Mat radiusList_mat = radiusList;
Mat numberList_mat = numberList;
BRISK retVal = new BRISK(create_1(thresh, octaves, radiusList_mat.nativeObj, numberList_mat.nativeObj));
return retVal;
}
//
// C++: static Ptr_BRISK create(int thresh = 30, int octaves = 3, float patternScale = 1.0f)
//
//javadoc: BRISK::create(thresh, octaves, patternScale)
public static BRISK create(int thresh, int octaves, float patternScale)
{
BRISK retVal = new BRISK(create_2(thresh, octaves, patternScale));
return retVal;
}
//javadoc: BRISK::create()
public static BRISK create()
{
BRISK retVal = new BRISK(create_3());
return retVal;
}
//
// C++: static Ptr_BRISK create(vector_float radiusList, vector_int numberList, float dMax = 5.85f, float dMin = 8.2f, vector_int indexChange = std::vector<int>())
//
//javadoc: BRISK::create(radiusList, numberList, dMax, dMin, indexChange)
public static BRISK create(MatOfFloat radiusList, MatOfInt numberList, float dMax, float dMin, MatOfInt indexChange)
{
Mat radiusList_mat = radiusList;
Mat numberList_mat = numberList;
Mat indexChange_mat = indexChange;
BRISK retVal = new BRISK(create_4(radiusList_mat.nativeObj, numberList_mat.nativeObj, dMax, dMin, indexChange_mat.nativeObj));
return retVal;
}
//javadoc: BRISK::create(radiusList, numberList)
public static BRISK create(MatOfFloat radiusList, MatOfInt numberList)
{
Mat radiusList_mat = radiusList;
Mat numberList_mat = numberList;
BRISK retVal = new BRISK(create_5(radiusList_mat.nativeObj, numberList_mat.nativeObj));
return retVal;
}
//
// C++: String getDefaultName()
//
//javadoc: BRISK::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_BRISK create(int thresh, int octaves, vector_float radiusList, vector_int numberList, float dMax = 5.85f, float dMin = 8.2f, vector_int indexChange = std::vector<int>())
private static native long create_0(int thresh, int octaves, long radiusList_mat_nativeObj, long numberList_mat_nativeObj, float dMax, float dMin, long indexChange_mat_nativeObj);
private static native long create_1(int thresh, int octaves, long radiusList_mat_nativeObj, long numberList_mat_nativeObj);
// C++: static Ptr_BRISK create(int thresh = 30, int octaves = 3, float patternScale = 1.0f)
private static native long create_2(int thresh, int octaves, float patternScale);
private static native long create_3();
// C++: static Ptr_BRISK create(vector_float radiusList, vector_int numberList, float dMax = 5.85f, float dMin = 8.2f, vector_int indexChange = std::vector<int>())
private static native long create_4(long radiusList_mat_nativeObj, long numberList_mat_nativeObj, float dMax, float dMin, long indexChange_mat_nativeObj);
private static native long create_5(long radiusList_mat_nativeObj, long numberList_mat_nativeObj);
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}
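A minimal sketch of the BRISK class removed above, using the documented default parameters (illustration only; the image path is an assumption):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.BRISK;
import org.opencv.imgcodecs.Imgcodecs;

public class BriskSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat image = Imgcodecs.imread("input.jpg", Imgcodecs.IMREAD_GRAYSCALE); // hypothetical path
        // thresh 30, 3 octaves, pattern scale 1.0f (the documented defaults)
        BRISK brisk = BRISK.create(30, 3, 1.0f);
        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        Mat descriptors = new Mat();
        brisk.detectAndCompute(image, new Mat(), keypoints, descriptors);
        System.out.println("BRISK: " + keypoints.rows() + " keypoints, "
                + descriptors.cols() + "-byte descriptors");
    }
}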

@@ -1,197 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.utils.Converters;
// C++: class javaDescriptorExtractor
//javadoc: javaDescriptorExtractor
@Deprecated
public class DescriptorExtractor {
protected final long nativeObj;
protected DescriptorExtractor(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
private static final int
OPPONENTEXTRACTOR = 1000;
public static final int
SIFT = 1,
SURF = 2,
ORB = 3,
BRIEF = 4,
BRISK = 5,
FREAK = 6,
AKAZE = 7,
OPPONENT_SIFT = OPPONENTEXTRACTOR + SIFT,
OPPONENT_SURF = OPPONENTEXTRACTOR + SURF,
OPPONENT_ORB = OPPONENTEXTRACTOR + ORB,
OPPONENT_BRIEF = OPPONENTEXTRACTOR + BRIEF,
OPPONENT_BRISK = OPPONENTEXTRACTOR + BRISK,
OPPONENT_FREAK = OPPONENTEXTRACTOR + FREAK,
OPPONENT_AKAZE = OPPONENTEXTRACTOR + AKAZE;
//
// C++: static Ptr_javaDescriptorExtractor create(int extractorType)
//
//javadoc: javaDescriptorExtractor::create(extractorType)
public static DescriptorExtractor create(int extractorType)
{
DescriptorExtractor retVal = new DescriptorExtractor(create_0(extractorType));
return retVal;
}
//
// C++: bool empty()
//
//javadoc: javaDescriptorExtractor::empty()
public boolean empty()
{
boolean retVal = empty_0(nativeObj);
return retVal;
}
//
// C++: int descriptorSize()
//
//javadoc: javaDescriptorExtractor::descriptorSize()
public int descriptorSize()
{
int retVal = descriptorSize_0(nativeObj);
return retVal;
}
//
// C++: int descriptorType()
//
//javadoc: javaDescriptorExtractor::descriptorType()
public int descriptorType()
{
int retVal = descriptorType_0(nativeObj);
return retVal;
}
//
// C++: void compute(Mat image, vector_KeyPoint& keypoints, Mat descriptors)
//
//javadoc: javaDescriptorExtractor::compute(image, keypoints, descriptors)
public void compute(Mat image, MatOfKeyPoint keypoints, Mat descriptors)
{
Mat keypoints_mat = keypoints;
compute_0(nativeObj, image.nativeObj, keypoints_mat.nativeObj, descriptors.nativeObj);
return;
}
//
// C++: void compute(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat& descriptors)
//
//javadoc: javaDescriptorExtractor::compute(images, keypoints, descriptors)
public void compute(List<Mat> images, List<MatOfKeyPoint> keypoints, List<Mat> descriptors)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
List<Mat> keypoints_tmplm = new ArrayList<Mat>((keypoints != null) ? keypoints.size() : 0);
Mat keypoints_mat = Converters.vector_vector_KeyPoint_to_Mat(keypoints, keypoints_tmplm);
Mat descriptors_mat = new Mat();
compute_1(nativeObj, images_mat.nativeObj, keypoints_mat.nativeObj, descriptors_mat.nativeObj);
Converters.Mat_to_vector_vector_KeyPoint(keypoints_mat, keypoints);
keypoints_mat.release();
Converters.Mat_to_vector_Mat(descriptors_mat, descriptors);
descriptors_mat.release();
return;
}
//
// C++: void read(String fileName)
//
//javadoc: javaDescriptorExtractor::read(fileName)
public void read(String fileName)
{
read_0(nativeObj, fileName);
return;
}
//
// C++: void write(String fileName)
//
//javadoc: javaDescriptorExtractor::write(fileName)
public void write(String fileName)
{
write_0(nativeObj, fileName);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_javaDescriptorExtractor create(int extractorType)
private static native long create_0(int extractorType);
// C++: bool empty()
private static native boolean empty_0(long nativeObj);
// C++: int descriptorSize()
private static native int descriptorSize_0(long nativeObj);
// C++: int descriptorType()
private static native int descriptorType_0(long nativeObj);
// C++: void compute(Mat image, vector_KeyPoint& keypoints, Mat descriptors)
private static native void compute_0(long nativeObj, long image_nativeObj, long keypoints_mat_nativeObj, long descriptors_nativeObj);
// C++: void compute(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat& descriptors)
private static native void compute_1(long nativeObj, long images_mat_nativeObj, long keypoints_mat_nativeObj, long descriptors_mat_nativeObj);
// C++: void read(String fileName)
private static native void read_0(long nativeObj, String fileName);
// C++: void write(String fileName)
private static native void write_0(long nativeObj, String fileName);
// native support for java finalize()
private static native void delete(long nativeObj);
}

@@ -1,426 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.utils.Converters;
// C++: class DescriptorMatcher
//javadoc: DescriptorMatcher
public class DescriptorMatcher extends Algorithm {
protected DescriptorMatcher(long addr) { super(addr); }
public static final int
FLANNBASED = 1,
BRUTEFORCE = 2,
BRUTEFORCE_L1 = 3,
BRUTEFORCE_HAMMING = 4,
BRUTEFORCE_HAMMINGLUT = 5,
BRUTEFORCE_SL2 = 6;
//
// C++: Ptr_DescriptorMatcher clone(bool emptyTrainData = false)
//
//javadoc: DescriptorMatcher::clone(emptyTrainData)
public DescriptorMatcher clone(boolean emptyTrainData)
{
DescriptorMatcher retVal = new DescriptorMatcher(clone_0(nativeObj, emptyTrainData));
return retVal;
}
//javadoc: DescriptorMatcher::clone()
public DescriptorMatcher clone()
{
DescriptorMatcher retVal = new DescriptorMatcher(clone_1(nativeObj));
return retVal;
}
//
// C++: static Ptr_DescriptorMatcher create(String descriptorMatcherType)
//
//javadoc: DescriptorMatcher::create(descriptorMatcherType)
public static DescriptorMatcher create(String descriptorMatcherType)
{
DescriptorMatcher retVal = new DescriptorMatcher(create_0(descriptorMatcherType));
return retVal;
}
//
// C++: static Ptr_DescriptorMatcher create(int matcherType)
//
//javadoc: DescriptorMatcher::create(matcherType)
public static DescriptorMatcher create(int matcherType)
{
DescriptorMatcher retVal = new DescriptorMatcher(create_1(matcherType));
return retVal;
}
//
// C++: bool empty()
//
//javadoc: DescriptorMatcher::empty()
public boolean empty()
{
boolean retVal = empty_0(nativeObj);
return retVal;
}
//
// C++: bool isMaskSupported()
//
//javadoc: DescriptorMatcher::isMaskSupported()
public boolean isMaskSupported()
{
boolean retVal = isMaskSupported_0(nativeObj);
return retVal;
}
//
// C++: vector_Mat getTrainDescriptors()
//
//javadoc: DescriptorMatcher::getTrainDescriptors()
public List<Mat> getTrainDescriptors()
{
List<Mat> retVal = new ArrayList<Mat>();
Mat retValMat = new Mat(getTrainDescriptors_0(nativeObj));
Converters.Mat_to_vector_Mat(retValMat, retVal);
return retVal;
}
//
// C++: void add(vector_Mat descriptors)
//
//javadoc: DescriptorMatcher::add(descriptors)
public void add(List<Mat> descriptors)
{
Mat descriptors_mat = Converters.vector_Mat_to_Mat(descriptors);
add_0(nativeObj, descriptors_mat.nativeObj);
return;
}
//
// C++: void clear()
//
//javadoc: DescriptorMatcher::clear()
public void clear()
{
clear_0(nativeObj);
return;
}
//
// C++: void knnMatch(Mat queryDescriptors, Mat trainDescriptors, vector_vector_DMatch& matches, int k, Mat mask = Mat(), bool compactResult = false)
//
//javadoc: DescriptorMatcher::knnMatch(queryDescriptors, trainDescriptors, matches, k, mask, compactResult)
public void knnMatch(Mat queryDescriptors, Mat trainDescriptors, List<MatOfDMatch> matches, int k, Mat mask, boolean compactResult)
{
Mat matches_mat = new Mat();
knnMatch_0(nativeObj, queryDescriptors.nativeObj, trainDescriptors.nativeObj, matches_mat.nativeObj, k, mask.nativeObj, compactResult);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//javadoc: DescriptorMatcher::knnMatch(queryDescriptors, trainDescriptors, matches, k)
public void knnMatch(Mat queryDescriptors, Mat trainDescriptors, List<MatOfDMatch> matches, int k)
{
Mat matches_mat = new Mat();
knnMatch_1(nativeObj, queryDescriptors.nativeObj, trainDescriptors.nativeObj, matches_mat.nativeObj, k);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//
// C++: void knnMatch(Mat queryDescriptors, vector_vector_DMatch& matches, int k, vector_Mat masks = vector_Mat(), bool compactResult = false)
//
//javadoc: DescriptorMatcher::knnMatch(queryDescriptors, matches, k, masks, compactResult)
public void knnMatch(Mat queryDescriptors, List<MatOfDMatch> matches, int k, List<Mat> masks, boolean compactResult)
{
Mat matches_mat = new Mat();
Mat masks_mat = Converters.vector_Mat_to_Mat(masks);
knnMatch_2(nativeObj, queryDescriptors.nativeObj, matches_mat.nativeObj, k, masks_mat.nativeObj, compactResult);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//javadoc: DescriptorMatcher::knnMatch(queryDescriptors, matches, k)
public void knnMatch(Mat queryDescriptors, List<MatOfDMatch> matches, int k)
{
Mat matches_mat = new Mat();
knnMatch_3(nativeObj, queryDescriptors.nativeObj, matches_mat.nativeObj, k);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//
// C++: void match(Mat queryDescriptors, Mat trainDescriptors, vector_DMatch& matches, Mat mask = Mat())
//
//javadoc: DescriptorMatcher::match(queryDescriptors, trainDescriptors, matches, mask)
public void match(Mat queryDescriptors, Mat trainDescriptors, MatOfDMatch matches, Mat mask)
{
Mat matches_mat = matches;
match_0(nativeObj, queryDescriptors.nativeObj, trainDescriptors.nativeObj, matches_mat.nativeObj, mask.nativeObj);
return;
}
//javadoc: DescriptorMatcher::match(queryDescriptors, trainDescriptors, matches)
public void match(Mat queryDescriptors, Mat trainDescriptors, MatOfDMatch matches)
{
Mat matches_mat = matches;
match_1(nativeObj, queryDescriptors.nativeObj, trainDescriptors.nativeObj, matches_mat.nativeObj);
return;
}
//
// C++: void match(Mat queryDescriptors, vector_DMatch& matches, vector_Mat masks = vector_Mat())
//
//javadoc: DescriptorMatcher::match(queryDescriptors, matches, masks)
public void match(Mat queryDescriptors, MatOfDMatch matches, List<Mat> masks)
{
Mat matches_mat = matches;
Mat masks_mat = Converters.vector_Mat_to_Mat(masks);
match_2(nativeObj, queryDescriptors.nativeObj, matches_mat.nativeObj, masks_mat.nativeObj);
return;
}
//javadoc: DescriptorMatcher::match(queryDescriptors, matches)
public void match(Mat queryDescriptors, MatOfDMatch matches)
{
Mat matches_mat = matches;
match_3(nativeObj, queryDescriptors.nativeObj, matches_mat.nativeObj);
return;
}
//
// C++: void radiusMatch(Mat queryDescriptors, Mat trainDescriptors, vector_vector_DMatch& matches, float maxDistance, Mat mask = Mat(), bool compactResult = false)
//
//javadoc: DescriptorMatcher::radiusMatch(queryDescriptors, trainDescriptors, matches, maxDistance, mask, compactResult)
public void radiusMatch(Mat queryDescriptors, Mat trainDescriptors, List<MatOfDMatch> matches, float maxDistance, Mat mask, boolean compactResult)
{
Mat matches_mat = new Mat();
radiusMatch_0(nativeObj, queryDescriptors.nativeObj, trainDescriptors.nativeObj, matches_mat.nativeObj, maxDistance, mask.nativeObj, compactResult);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//javadoc: DescriptorMatcher::radiusMatch(queryDescriptors, trainDescriptors, matches, maxDistance)
public void radiusMatch(Mat queryDescriptors, Mat trainDescriptors, List<MatOfDMatch> matches, float maxDistance)
{
Mat matches_mat = new Mat();
radiusMatch_1(nativeObj, queryDescriptors.nativeObj, trainDescriptors.nativeObj, matches_mat.nativeObj, maxDistance);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//
// C++: void radiusMatch(Mat queryDescriptors, vector_vector_DMatch& matches, float maxDistance, vector_Mat masks = vector_Mat(), bool compactResult = false)
//
//javadoc: DescriptorMatcher::radiusMatch(queryDescriptors, matches, maxDistance, masks, compactResult)
public void radiusMatch(Mat queryDescriptors, List<MatOfDMatch> matches, float maxDistance, List<Mat> masks, boolean compactResult)
{
Mat matches_mat = new Mat();
Mat masks_mat = Converters.vector_Mat_to_Mat(masks);
radiusMatch_2(nativeObj, queryDescriptors.nativeObj, matches_mat.nativeObj, maxDistance, masks_mat.nativeObj, compactResult);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//javadoc: DescriptorMatcher::radiusMatch(queryDescriptors, matches, maxDistance)
public void radiusMatch(Mat queryDescriptors, List<MatOfDMatch> matches, float maxDistance)
{
Mat matches_mat = new Mat();
radiusMatch_3(nativeObj, queryDescriptors.nativeObj, matches_mat.nativeObj, maxDistance);
Converters.Mat_to_vector_vector_DMatch(matches_mat, matches);
matches_mat.release();
return;
}
//
// C++: void read(FileNode arg1)
//
// Unknown type 'FileNode' (I), skipping the function
//
// C++: void read(String fileName)
//
//javadoc: DescriptorMatcher::read(fileName)
public void read(String fileName)
{
read_0(nativeObj, fileName);
return;
}
//
// C++: void train()
//
//javadoc: DescriptorMatcher::train()
public void train()
{
train_0(nativeObj);
return;
}
//
// C++: void write(Ptr_FileStorage fs, String name = String())
//
// Unknown type 'Ptr_FileStorage' (I), skipping the function
//
// C++: void write(String fileName)
//
//javadoc: DescriptorMatcher::write(fileName)
public void write(String fileName)
{
write_0(nativeObj, fileName);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: Ptr_DescriptorMatcher clone(bool emptyTrainData = false)
private static native long clone_0(long nativeObj, boolean emptyTrainData);
private static native long clone_1(long nativeObj);
// C++: static Ptr_DescriptorMatcher create(String descriptorMatcherType)
private static native long create_0(String descriptorMatcherType);
// C++: static Ptr_DescriptorMatcher create(int matcherType)
private static native long create_1(int matcherType);
// C++: bool empty()
private static native boolean empty_0(long nativeObj);
// C++: bool isMaskSupported()
private static native boolean isMaskSupported_0(long nativeObj);
// C++: vector_Mat getTrainDescriptors()
private static native long getTrainDescriptors_0(long nativeObj);
// C++: void add(vector_Mat descriptors)
private static native void add_0(long nativeObj, long descriptors_mat_nativeObj);
// C++: void clear()
private static native void clear_0(long nativeObj);
// C++: void knnMatch(Mat queryDescriptors, Mat trainDescriptors, vector_vector_DMatch& matches, int k, Mat mask = Mat(), bool compactResult = false)
private static native void knnMatch_0(long nativeObj, long queryDescriptors_nativeObj, long trainDescriptors_nativeObj, long matches_mat_nativeObj, int k, long mask_nativeObj, boolean compactResult);
private static native void knnMatch_1(long nativeObj, long queryDescriptors_nativeObj, long trainDescriptors_nativeObj, long matches_mat_nativeObj, int k);
// C++: void knnMatch(Mat queryDescriptors, vector_vector_DMatch& matches, int k, vector_Mat masks = vector_Mat(), bool compactResult = false)
private static native void knnMatch_2(long nativeObj, long queryDescriptors_nativeObj, long matches_mat_nativeObj, int k, long masks_mat_nativeObj, boolean compactResult);
private static native void knnMatch_3(long nativeObj, long queryDescriptors_nativeObj, long matches_mat_nativeObj, int k);
// C++: void match(Mat queryDescriptors, Mat trainDescriptors, vector_DMatch& matches, Mat mask = Mat())
private static native void match_0(long nativeObj, long queryDescriptors_nativeObj, long trainDescriptors_nativeObj, long matches_mat_nativeObj, long mask_nativeObj);
private static native void match_1(long nativeObj, long queryDescriptors_nativeObj, long trainDescriptors_nativeObj, long matches_mat_nativeObj);
// C++: void match(Mat queryDescriptors, vector_DMatch& matches, vector_Mat masks = vector_Mat())
private static native void match_2(long nativeObj, long queryDescriptors_nativeObj, long matches_mat_nativeObj, long masks_mat_nativeObj);
private static native void match_3(long nativeObj, long queryDescriptors_nativeObj, long matches_mat_nativeObj);
// C++: void radiusMatch(Mat queryDescriptors, Mat trainDescriptors, vector_vector_DMatch& matches, float maxDistance, Mat mask = Mat(), bool compactResult = false)
private static native void radiusMatch_0(long nativeObj, long queryDescriptors_nativeObj, long trainDescriptors_nativeObj, long matches_mat_nativeObj, float maxDistance, long mask_nativeObj, boolean compactResult);
private static native void radiusMatch_1(long nativeObj, long queryDescriptors_nativeObj, long trainDescriptors_nativeObj, long matches_mat_nativeObj, float maxDistance);
// C++: void radiusMatch(Mat queryDescriptors, vector_vector_DMatch& matches, float maxDistance, vector_Mat masks = vector_Mat(), bool compactResult = false)
private static native void radiusMatch_2(long nativeObj, long queryDescriptors_nativeObj, long matches_mat_nativeObj, float maxDistance, long masks_mat_nativeObj, boolean compactResult);
private static native void radiusMatch_3(long nativeObj, long queryDescriptors_nativeObj, long matches_mat_nativeObj, float maxDistance);
// C++: void read(String fileName)
private static native void read_0(long nativeObj, String fileName);
// C++: void train()
private static native void train_0(long nativeObj);
// C++: void write(String fileName)
private static native void write_0(long nativeObj, String fileName);
// native support for java finalize()
private static native void delete(long nativeObj);
}
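For reference, a knnMatch sketch with a Lowe-style ratio test using the DescriptorMatcher class removed above (illustration only; image paths are assumptions):

import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Core;
import org.opencv.core.DMatch;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.ORB;
import org.opencv.imgcodecs.Imgcodecs;

public class KnnMatchSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat img1 = Imgcodecs.imread("a.jpg", Imgcodecs.IMREAD_GRAYSCALE); // hypothetical paths
        Mat img2 = Imgcodecs.imread("b.jpg", Imgcodecs.IMREAD_GRAYSCALE);
        ORB orb = ORB.create();
        MatOfKeyPoint kp1 = new MatOfKeyPoint(), kp2 = new MatOfKeyPoint();
        Mat desc1 = new Mat(), desc2 = new Mat();
        orb.detectAndCompute(img1, new Mat(), kp1, desc1);
        orb.detectAndCompute(img2, new Mat(), kp2, desc2);
        DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
        List<MatOfDMatch> knn = new ArrayList<>();
        matcher.knnMatch(desc1, desc2, knn, 2); // 2 nearest neighbours per query descriptor
        // Ratio test: keep a match only if it is clearly better than the runner-up.
        List<DMatch> good = new ArrayList<>();
        for (MatOfDMatch pair : knn) {
            DMatch[] m = pair.toArray();
            if (m.length >= 2 && m[0].distance < 0.75f * m[1].distance) {
                good.add(m[0]);
            }
        }
        System.out.println("Good matches: " + good.size());
    }
}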

@@ -1,182 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
// C++: class FastFeatureDetector
//javadoc: FastFeatureDetector
public class FastFeatureDetector extends Feature2D {
protected FastFeatureDetector(long addr) { super(addr); }
public static final int
TYPE_5_8 = 0,
TYPE_7_12 = 1,
TYPE_9_16 = 2,
THRESHOLD = 10000,
NONMAX_SUPPRESSION = 10001,
FAST_N = 10002;
//
// C++: static Ptr_FastFeatureDetector create(int threshold = 10, bool nonmaxSuppression = true, int type = FastFeatureDetector::TYPE_9_16)
//
//javadoc: FastFeatureDetector::create(threshold, nonmaxSuppression, type)
public static FastFeatureDetector create(int threshold, boolean nonmaxSuppression, int type)
{
FastFeatureDetector retVal = new FastFeatureDetector(create_0(threshold, nonmaxSuppression, type));
return retVal;
}
//javadoc: FastFeatureDetector::create()
public static FastFeatureDetector create()
{
FastFeatureDetector retVal = new FastFeatureDetector(create_1());
return retVal;
}
//
// C++: String getDefaultName()
//
//javadoc: FastFeatureDetector::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
//
// C++: bool getNonmaxSuppression()
//
//javadoc: FastFeatureDetector::getNonmaxSuppression()
public boolean getNonmaxSuppression()
{
boolean retVal = getNonmaxSuppression_0(nativeObj);
return retVal;
}
//
// C++: int getThreshold()
//
//javadoc: FastFeatureDetector::getThreshold()
public int getThreshold()
{
int retVal = getThreshold_0(nativeObj);
return retVal;
}
//
// C++: int getType()
//
//javadoc: FastFeatureDetector::getType()
public int getType()
{
int retVal = getType_0(nativeObj);
return retVal;
}
//
// C++: void setNonmaxSuppression(bool f)
//
//javadoc: FastFeatureDetector::setNonmaxSuppression(f)
public void setNonmaxSuppression(boolean f)
{
setNonmaxSuppression_0(nativeObj, f);
return;
}
//
// C++: void setThreshold(int threshold)
//
//javadoc: FastFeatureDetector::setThreshold(threshold)
public void setThreshold(int threshold)
{
setThreshold_0(nativeObj, threshold);
return;
}
//
// C++: void setType(int type)
//
//javadoc: FastFeatureDetector::setType(type)
public void setType(int type)
{
setType_0(nativeObj, type);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_FastFeatureDetector create(int threshold = 10, bool nonmaxSuppression = true, int type = FastFeatureDetector::TYPE_9_16)
private static native long create_0(int threshold, boolean nonmaxSuppression, int type);
private static native long create_1();
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// C++: bool getNonmaxSuppression()
private static native boolean getNonmaxSuppression_0(long nativeObj);
// C++: int getThreshold()
private static native int getThreshold_0(long nativeObj);
// C++: int getType()
private static native int getType_0(long nativeObj);
// C++: void setNonmaxSuppression(bool f)
private static native void setNonmaxSuppression_0(long nativeObj, boolean f);
// C++: void setThreshold(int threshold)
private static native void setThreshold_0(long nativeObj, int threshold);
// C++: void setType(int type)
private static native void setType_0(long nativeObj, int type);
// native support for java finalize()
private static native void delete(long nativeObj);
}
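A minimal sketch of the FastFeatureDetector class removed above (not part of this commit; the image path is an assumption):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.FastFeatureDetector;
import org.opencv.imgcodecs.Imgcodecs;

public class FastSketch {
    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat image = Imgcodecs.imread("input.jpg", Imgcodecs.IMREAD_GRAYSCALE); // hypothetical path
        // threshold 20, non-max suppression on, 9_16 corner pattern
        FastFeatureDetector fast = FastFeatureDetector.create(20, true, FastFeatureDetector.TYPE_9_16);
        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        fast.detect(image, keypoints);
        System.out.println("FAST keypoints at threshold " + fast.getThreshold() + ": " + keypoints.rows());
    }
}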

@@ -1,292 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.utils.Converters;
// C++: class Feature2D
//javadoc: Feature2D
public class Feature2D extends Algorithm {
protected Feature2D(long addr) { super(addr); }
//
// C++: String getDefaultName()
//
//javadoc: Feature2D::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
//
// C++: bool empty()
//
//javadoc: Feature2D::empty()
public boolean empty()
{
boolean retVal = empty_0(nativeObj);
return retVal;
}
//
// C++: int defaultNorm()
//
//javadoc: Feature2D::defaultNorm()
public int defaultNorm()
{
int retVal = defaultNorm_0(nativeObj);
return retVal;
}
//
// C++: int descriptorSize()
//
//javadoc: Feature2D::descriptorSize()
public int descriptorSize()
{
int retVal = descriptorSize_0(nativeObj);
return retVal;
}
//
// C++: int descriptorType()
//
//javadoc: Feature2D::descriptorType()
public int descriptorType()
{
int retVal = descriptorType_0(nativeObj);
return retVal;
}
//
// C++: void compute(Mat image, vector_KeyPoint& keypoints, Mat& descriptors)
//
//javadoc: Feature2D::compute(image, keypoints, descriptors)
public void compute(Mat image, MatOfKeyPoint keypoints, Mat descriptors)
{
Mat keypoints_mat = keypoints;
compute_0(nativeObj, image.nativeObj, keypoints_mat.nativeObj, descriptors.nativeObj);
return;
}
//
// C++: void compute(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat& descriptors)
//
//javadoc: Feature2D::compute(images, keypoints, descriptors)
public void compute(List<Mat> images, List<MatOfKeyPoint> keypoints, List<Mat> descriptors)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
List<Mat> keypoints_tmplm = new ArrayList<Mat>((keypoints != null) ? keypoints.size() : 0);
Mat keypoints_mat = Converters.vector_vector_KeyPoint_to_Mat(keypoints, keypoints_tmplm);
Mat descriptors_mat = new Mat();
compute_1(nativeObj, images_mat.nativeObj, keypoints_mat.nativeObj, descriptors_mat.nativeObj);
Converters.Mat_to_vector_vector_KeyPoint(keypoints_mat, keypoints);
keypoints_mat.release();
Converters.Mat_to_vector_Mat(descriptors_mat, descriptors);
descriptors_mat.release();
return;
}
//
// C++: void detect(Mat image, vector_KeyPoint& keypoints, Mat mask = Mat())
//
//javadoc: Feature2D::detect(image, keypoints, mask)
public void detect(Mat image, MatOfKeyPoint keypoints, Mat mask)
{
Mat keypoints_mat = keypoints;
detect_0(nativeObj, image.nativeObj, keypoints_mat.nativeObj, mask.nativeObj);
return;
}
//javadoc: Feature2D::detect(image, keypoints)
public void detect(Mat image, MatOfKeyPoint keypoints)
{
Mat keypoints_mat = keypoints;
detect_1(nativeObj, image.nativeObj, keypoints_mat.nativeObj);
return;
}
//
// C++: void detect(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat masks = vector_Mat())
//
//javadoc: Feature2D::detect(images, keypoints, masks)
public void detect(List<Mat> images, List<MatOfKeyPoint> keypoints, List<Mat> masks)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
Mat keypoints_mat = new Mat();
Mat masks_mat = Converters.vector_Mat_to_Mat(masks);
detect_2(nativeObj, images_mat.nativeObj, keypoints_mat.nativeObj, masks_mat.nativeObj);
Converters.Mat_to_vector_vector_KeyPoint(keypoints_mat, keypoints);
keypoints_mat.release();
return;
}
//javadoc: Feature2D::detect(images, keypoints)
public void detect(List<Mat> images, List<MatOfKeyPoint> keypoints)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
Mat keypoints_mat = new Mat();
detect_3(nativeObj, images_mat.nativeObj, keypoints_mat.nativeObj);
Converters.Mat_to_vector_vector_KeyPoint(keypoints_mat, keypoints);
keypoints_mat.release();
return;
}
//
// C++: void detectAndCompute(Mat image, Mat mask, vector_KeyPoint& keypoints, Mat& descriptors, bool useProvidedKeypoints = false)
//
//javadoc: Feature2D::detectAndCompute(image, mask, keypoints, descriptors, useProvidedKeypoints)
public void detectAndCompute(Mat image, Mat mask, MatOfKeyPoint keypoints, Mat descriptors, boolean useProvidedKeypoints)
{
Mat keypoints_mat = keypoints;
detectAndCompute_0(nativeObj, image.nativeObj, mask.nativeObj, keypoints_mat.nativeObj, descriptors.nativeObj, useProvidedKeypoints);
return;
}
//javadoc: Feature2D::detectAndCompute(image, mask, keypoints, descriptors)
public void detectAndCompute(Mat image, Mat mask, MatOfKeyPoint keypoints, Mat descriptors)
{
Mat keypoints_mat = keypoints;
detectAndCompute_1(nativeObj, image.nativeObj, mask.nativeObj, keypoints_mat.nativeObj, descriptors.nativeObj);
return;
}
//
// C++: void read(FileNode arg1)
//
// Unknown type 'FileNode' (I), skipping the function
//
// C++: void read(String fileName)
//
//javadoc: Feature2D::read(fileName)
public void read(String fileName)
{
read_0(nativeObj, fileName);
return;
}
//
// C++: void write(Ptr_FileStorage fs, String name = String())
//
// Unknown type 'Ptr_FileStorage' (I), skipping the function
//
// C++: void write(String fileName)
//
//javadoc: Feature2D::write(fileName)
public void write(String fileName)
{
write_0(nativeObj, fileName);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// C++: bool empty()
private static native boolean empty_0(long nativeObj);
// C++: int defaultNorm()
private static native int defaultNorm_0(long nativeObj);
// C++: int descriptorSize()
private static native int descriptorSize_0(long nativeObj);
// C++: int descriptorType()
private static native int descriptorType_0(long nativeObj);
// C++: void compute(Mat image, vector_KeyPoint& keypoints, Mat& descriptors)
private static native void compute_0(long nativeObj, long image_nativeObj, long keypoints_mat_nativeObj, long descriptors_nativeObj);
// C++: void compute(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat& descriptors)
private static native void compute_1(long nativeObj, long images_mat_nativeObj, long keypoints_mat_nativeObj, long descriptors_mat_nativeObj);
// C++: void detect(Mat image, vector_KeyPoint& keypoints, Mat mask = Mat())
private static native void detect_0(long nativeObj, long image_nativeObj, long keypoints_mat_nativeObj, long mask_nativeObj);
private static native void detect_1(long nativeObj, long image_nativeObj, long keypoints_mat_nativeObj);
// C++: void detect(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat masks = vector_Mat())
private static native void detect_2(long nativeObj, long images_mat_nativeObj, long keypoints_mat_nativeObj, long masks_mat_nativeObj);
private static native void detect_3(long nativeObj, long images_mat_nativeObj, long keypoints_mat_nativeObj);
// C++: void detectAndCompute(Mat image, Mat mask, vector_KeyPoint& keypoints, Mat& descriptors, bool useProvidedKeypoints = false)
private static native void detectAndCompute_0(long nativeObj, long image_nativeObj, long mask_nativeObj, long keypoints_mat_nativeObj, long descriptors_nativeObj, boolean useProvidedKeypoints);
private static native void detectAndCompute_1(long nativeObj, long image_nativeObj, long mask_nativeObj, long keypoints_mat_nativeObj, long descriptors_nativeObj);
// C++: void read(String fileName)
private static native void read_0(long nativeObj, String fileName);
// C++: void write(String fileName)
private static native void write_0(long nativeObj, String fileName);
// native support for java finalize()
private static native void delete(long nativeObj);
}
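Feature2D, removed above, is the common base class, so concrete detectors are interchangeable through its detect/compute interface. A sketch of that polymorphic use (illustration only; the image path is an assumption):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.AKAZE;
import org.opencv.features2d.Feature2D;
import org.opencv.features2d.ORB;
import org.opencv.imgcodecs.Imgcodecs;

public class Feature2DSketch {
    // Any Feature2D subclass can be passed in; the caller never names the concrete detector.
    static int countKeypoints(Feature2D detector, Mat image) {
        MatOfKeyPoint keypoints = new MatOfKeyPoint();
        Mat descriptors = new Mat();
        detector.detectAndCompute(image, new Mat(), keypoints, descriptors);
        return keypoints.rows();
    }

    public static void main(String[] args) {
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
        Mat image = Imgcodecs.imread("input.jpg", Imgcodecs.IMREAD_GRAYSCALE); // hypothetical path
        System.out.println("ORB:   " + countKeypoints(ORB.create(), image));
        System.out.println("AKAZE: " + countKeypoints(AKAZE.create(), image));
    }
}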

@@ -1,219 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.utils.Converters;
// C++: class javaFeatureDetector
//javadoc: javaFeatureDetector
@Deprecated
public class FeatureDetector {
protected final long nativeObj;
protected FeatureDetector(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
private static final int
GRIDDETECTOR = 1000,
PYRAMIDDETECTOR = 2000,
DYNAMICDETECTOR = 3000;
public static final int
FAST = 1,
STAR = 2,
SIFT = 3,
SURF = 4,
ORB = 5,
MSER = 6,
GFTT = 7,
HARRIS = 8,
SIMPLEBLOB = 9,
DENSE = 10,
BRISK = 11,
AKAZE = 12,
GRID_FAST = GRIDDETECTOR + FAST,
GRID_STAR = GRIDDETECTOR + STAR,
GRID_SIFT = GRIDDETECTOR + SIFT,
GRID_SURF = GRIDDETECTOR + SURF,
GRID_ORB = GRIDDETECTOR + ORB,
GRID_MSER = GRIDDETECTOR + MSER,
GRID_GFTT = GRIDDETECTOR + GFTT,
GRID_HARRIS = GRIDDETECTOR + HARRIS,
GRID_SIMPLEBLOB = GRIDDETECTOR + SIMPLEBLOB,
GRID_DENSE = GRIDDETECTOR + DENSE,
GRID_BRISK = GRIDDETECTOR + BRISK,
GRID_AKAZE = GRIDDETECTOR + AKAZE,
PYRAMID_FAST = PYRAMIDDETECTOR + FAST,
PYRAMID_STAR = PYRAMIDDETECTOR + STAR,
PYRAMID_SIFT = PYRAMIDDETECTOR + SIFT,
PYRAMID_SURF = PYRAMIDDETECTOR + SURF,
PYRAMID_ORB = PYRAMIDDETECTOR + ORB,
PYRAMID_MSER = PYRAMIDDETECTOR + MSER,
PYRAMID_GFTT = PYRAMIDDETECTOR + GFTT,
PYRAMID_HARRIS = PYRAMIDDETECTOR + HARRIS,
PYRAMID_SIMPLEBLOB = PYRAMIDDETECTOR + SIMPLEBLOB,
PYRAMID_DENSE = PYRAMIDDETECTOR + DENSE,
PYRAMID_BRISK = PYRAMIDDETECTOR + BRISK,
PYRAMID_AKAZE = PYRAMIDDETECTOR + AKAZE,
DYNAMIC_FAST = DYNAMICDETECTOR + FAST,
DYNAMIC_STAR = DYNAMICDETECTOR + STAR,
DYNAMIC_SIFT = DYNAMICDETECTOR + SIFT,
DYNAMIC_SURF = DYNAMICDETECTOR + SURF,
DYNAMIC_ORB = DYNAMICDETECTOR + ORB,
DYNAMIC_MSER = DYNAMICDETECTOR + MSER,
DYNAMIC_GFTT = DYNAMICDETECTOR + GFTT,
DYNAMIC_HARRIS = DYNAMICDETECTOR + HARRIS,
DYNAMIC_SIMPLEBLOB = DYNAMICDETECTOR + SIMPLEBLOB,
DYNAMIC_DENSE = DYNAMICDETECTOR + DENSE,
DYNAMIC_BRISK = DYNAMICDETECTOR + BRISK,
DYNAMIC_AKAZE = DYNAMICDETECTOR + AKAZE;
//
// C++: static Ptr_javaFeatureDetector create(int detectorType)
//
//javadoc: javaFeatureDetector::create(detectorType)
@Deprecated
public static FeatureDetector create(int detectorType)
{
FeatureDetector retVal = new FeatureDetector(create_0(detectorType));
return retVal;
}
//
// C++: bool empty()
//
//javadoc: javaFeatureDetector::empty()
public boolean empty()
{
boolean retVal = empty_0(nativeObj);
return retVal;
}
//
// C++: void detect(Mat image, vector_KeyPoint& keypoints, Mat mask = Mat())
//
//javadoc: javaFeatureDetector::detect(image, keypoints, mask)
public void detect(Mat image, MatOfKeyPoint keypoints, Mat mask)
{
Mat keypoints_mat = keypoints;
detect_0(nativeObj, image.nativeObj, keypoints_mat.nativeObj, mask.nativeObj);
return;
}
//javadoc: javaFeatureDetector::detect(image, keypoints)
public void detect(Mat image, MatOfKeyPoint keypoints)
{
Mat keypoints_mat = keypoints;
detect_1(nativeObj, image.nativeObj, keypoints_mat.nativeObj);
return;
}
//
// C++: void detect(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat masks = std::vector<Mat>())
//
//javadoc: javaFeatureDetector::detect(images, keypoints, masks)
public void detect(List<Mat> images, List<MatOfKeyPoint> keypoints, List<Mat> masks)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
Mat keypoints_mat = new Mat();
Mat masks_mat = Converters.vector_Mat_to_Mat(masks);
detect_2(nativeObj, images_mat.nativeObj, keypoints_mat.nativeObj, masks_mat.nativeObj);
Converters.Mat_to_vector_vector_KeyPoint(keypoints_mat, keypoints);
keypoints_mat.release();
return;
}
//javadoc: javaFeatureDetector::detect(images, keypoints)
public void detect(List<Mat> images, List<MatOfKeyPoint> keypoints)
{
Mat images_mat = Converters.vector_Mat_to_Mat(images);
Mat keypoints_mat = new Mat();
detect_3(nativeObj, images_mat.nativeObj, keypoints_mat.nativeObj);
Converters.Mat_to_vector_vector_KeyPoint(keypoints_mat, keypoints);
keypoints_mat.release();
return;
}
//
// C++: void read(String fileName)
//
//javadoc: javaFeatureDetector::read(fileName)
public void read(String fileName)
{
read_0(nativeObj, fileName);
return;
}
//
// C++: void write(String fileName)
//
//javadoc: javaFeatureDetector::write(fileName)
public void write(String fileName)
{
write_0(nativeObj, fileName);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_javaFeatureDetector create(int detectorType)
private static native long create_0(int detectorType);
// C++: bool empty()
private static native boolean empty_0(long nativeObj);
// C++: void detect(Mat image, vector_KeyPoint& keypoints, Mat mask = Mat())
private static native void detect_0(long nativeObj, long image_nativeObj, long keypoints_mat_nativeObj, long mask_nativeObj);
private static native void detect_1(long nativeObj, long image_nativeObj, long keypoints_mat_nativeObj);
// C++: void detect(vector_Mat images, vector_vector_KeyPoint& keypoints, vector_Mat masks = std::vector<Mat>())
private static native void detect_2(long nativeObj, long images_mat_nativeObj, long keypoints_mat_nativeObj, long masks_mat_nativeObj);
private static native void detect_3(long nativeObj, long images_mat_nativeObj, long keypoints_mat_nativeObj);
// C++: void read(String fileName)
private static native void read_0(long nativeObj, String fileName);
// C++: void write(String fileName)
private static native void write_0(long nativeObj, String fileName);
// native support for java finalize()
private static native void delete(long nativeObj);
}

@@ -1,158 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.util.ArrayList;
import java.util.List;
import org.opencv.core.Mat;
import org.opencv.core.MatOfByte;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.Scalar;
import org.opencv.utils.Converters;
// C++: class Features2d
//javadoc: Features2d
public class Features2d {
public static final int
DRAW_OVER_OUTIMG = 1,
NOT_DRAW_SINGLE_POINTS = 2,
DRAW_RICH_KEYPOINTS = 4;
//
// C++: void drawKeypoints(Mat image, vector_KeyPoint keypoints, Mat& outImage, Scalar color = Scalar::all(-1), int flags = DrawMatchesFlags::DEFAULT)
//
//javadoc: drawKeypoints(image, keypoints, outImage, color, flags)
public static void drawKeypoints(Mat image, MatOfKeyPoint keypoints, Mat outImage, Scalar color, int flags)
{
Mat keypoints_mat = keypoints;
drawKeypoints_0(image.nativeObj, keypoints_mat.nativeObj, outImage.nativeObj, color.val[0], color.val[1], color.val[2], color.val[3], flags);
return;
}
//javadoc: drawKeypoints(image, keypoints, outImage)
public static void drawKeypoints(Mat image, MatOfKeyPoint keypoints, Mat outImage)
{
Mat keypoints_mat = keypoints;
drawKeypoints_1(image.nativeObj, keypoints_mat.nativeObj, outImage.nativeObj);
return;
}
//
// C++: void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_DMatch matches1to2, Mat& outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_char matchesMask = std::vector<char>(), int flags = DrawMatchesFlags::DEFAULT)
//
//javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg, matchColor, singlePointColor, matchesMask, flags)
public static void drawMatches(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, MatOfDMatch matches1to2, Mat outImg, Scalar matchColor, Scalar singlePointColor, MatOfByte matchesMask, int flags)
{
Mat keypoints1_mat = keypoints1;
Mat keypoints2_mat = keypoints2;
Mat matches1to2_mat = matches1to2;
Mat matchesMask_mat = matchesMask;
drawMatches_0(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj, matchColor.val[0], matchColor.val[1], matchColor.val[2], matchColor.val[3], singlePointColor.val[0], singlePointColor.val[1], singlePointColor.val[2], singlePointColor.val[3], matchesMask_mat.nativeObj, flags);
return;
}
//javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg)
public static void drawMatches(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, MatOfDMatch matches1to2, Mat outImg)
{
Mat keypoints1_mat = keypoints1;
Mat keypoints2_mat = keypoints2;
Mat matches1to2_mat = matches1to2;
drawMatches_1(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj);
return;
}
//
// C++: void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_vector_DMatch matches1to2, Mat outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_vector_char matchesMask = std::vector<std::vector<char> >(), int flags = 0)
//
//javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg, matchColor, singlePointColor, matchesMask, flags)
public static void drawMatches2(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, List<MatOfDMatch> matches1to2, Mat outImg, Scalar matchColor, Scalar singlePointColor, List<MatOfByte> matchesMask, int flags)
{
Mat keypoints1_mat = keypoints1;
Mat keypoints2_mat = keypoints2;
List<Mat> matches1to2_tmplm = new ArrayList<Mat>((matches1to2 != null) ? matches1to2.size() : 0);
Mat matches1to2_mat = Converters.vector_vector_DMatch_to_Mat(matches1to2, matches1to2_tmplm);
List<Mat> matchesMask_tmplm = new ArrayList<Mat>((matchesMask != null) ? matchesMask.size() : 0);
Mat matchesMask_mat = Converters.vector_vector_char_to_Mat(matchesMask, matchesMask_tmplm);
drawMatches2_0(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj, matchColor.val[0], matchColor.val[1], matchColor.val[2], matchColor.val[3], singlePointColor.val[0], singlePointColor.val[1], singlePointColor.val[2], singlePointColor.val[3], matchesMask_mat.nativeObj, flags);
return;
}
//javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg)
public static void drawMatches2(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, List<MatOfDMatch> matches1to2, Mat outImg)
{
Mat keypoints1_mat = keypoints1;
Mat keypoints2_mat = keypoints2;
List<Mat> matches1to2_tmplm = new ArrayList<Mat>((matches1to2 != null) ? matches1to2.size() : 0);
Mat matches1to2_mat = Converters.vector_vector_DMatch_to_Mat(matches1to2, matches1to2_tmplm);
drawMatches2_1(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj);
return;
}
//
// C++: void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_vector_DMatch matches1to2, Mat& outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_vector_char matchesMask = std::vector<std::vector<char> >(), int flags = DrawMatchesFlags::DEFAULT)
//
//javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg, matchColor, singlePointColor, matchesMask, flags)
public static void drawMatchesKnn(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, List<MatOfDMatch> matches1to2, Mat outImg, Scalar matchColor, Scalar singlePointColor, List<MatOfByte> matchesMask, int flags)
{
Mat keypoints1_mat = keypoints1;
Mat keypoints2_mat = keypoints2;
List<Mat> matches1to2_tmplm = new ArrayList<Mat>((matches1to2 != null) ? matches1to2.size() : 0);
Mat matches1to2_mat = Converters.vector_vector_DMatch_to_Mat(matches1to2, matches1to2_tmplm);
List<Mat> matchesMask_tmplm = new ArrayList<Mat>((matchesMask != null) ? matchesMask.size() : 0);
Mat matchesMask_mat = Converters.vector_vector_char_to_Mat(matchesMask, matchesMask_tmplm);
drawMatchesKnn_0(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj, matchColor.val[0], matchColor.val[1], matchColor.val[2], matchColor.val[3], singlePointColor.val[0], singlePointColor.val[1], singlePointColor.val[2], singlePointColor.val[3], matchesMask_mat.nativeObj, flags);
return;
}
//javadoc: drawMatches(img1, keypoints1, img2, keypoints2, matches1to2, outImg)
public static void drawMatchesKnn(Mat img1, MatOfKeyPoint keypoints1, Mat img2, MatOfKeyPoint keypoints2, List<MatOfDMatch> matches1to2, Mat outImg)
{
Mat keypoints1_mat = keypoints1;
Mat keypoints2_mat = keypoints2;
List<Mat> matches1to2_tmplm = new ArrayList<Mat>((matches1to2 != null) ? matches1to2.size() : 0);
Mat matches1to2_mat = Converters.vector_vector_DMatch_to_Mat(matches1to2, matches1to2_tmplm);
drawMatchesKnn_1(img1.nativeObj, keypoints1_mat.nativeObj, img2.nativeObj, keypoints2_mat.nativeObj, matches1to2_mat.nativeObj, outImg.nativeObj);
return;
}
// C++: void drawKeypoints(Mat image, vector_KeyPoint keypoints, Mat& outImage, Scalar color = Scalar::all(-1), int flags = DrawMatchesFlags::DEFAULT)
private static native void drawKeypoints_0(long image_nativeObj, long keypoints_mat_nativeObj, long outImage_nativeObj, double color_val0, double color_val1, double color_val2, double color_val3, int flags);
private static native void drawKeypoints_1(long image_nativeObj, long keypoints_mat_nativeObj, long outImage_nativeObj);
// C++: void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_DMatch matches1to2, Mat& outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_char matchesMask = std::vector<char>(), int flags = DrawMatchesFlags::DEFAULT)
private static native void drawMatches_0(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj, double matchColor_val0, double matchColor_val1, double matchColor_val2, double matchColor_val3, double singlePointColor_val0, double singlePointColor_val1, double singlePointColor_val2, double singlePointColor_val3, long matchesMask_mat_nativeObj, int flags);
private static native void drawMatches_1(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj);
// C++: void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_vector_DMatch matches1to2, Mat outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_vector_char matchesMask = std::vector<std::vector<char> >(), int flags = 0)
private static native void drawMatches2_0(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj, double matchColor_val0, double matchColor_val1, double matchColor_val2, double matchColor_val3, double singlePointColor_val0, double singlePointColor_val1, double singlePointColor_val2, double singlePointColor_val3, long matchesMask_mat_nativeObj, int flags);
private static native void drawMatches2_1(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj);
// C++: void drawMatches(Mat img1, vector_KeyPoint keypoints1, Mat img2, vector_KeyPoint keypoints2, vector_vector_DMatch matches1to2, Mat& outImg, Scalar matchColor = Scalar::all(-1), Scalar singlePointColor = Scalar::all(-1), vector_vector_char matchesMask = std::vector<std::vector<char> >(), int flags = DrawMatchesFlags::DEFAULT)
private static native void drawMatchesKnn_0(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj, double matchColor_val0, double matchColor_val1, double matchColor_val2, double matchColor_val3, double singlePointColor_val0, double singlePointColor_val1, double singlePointColor_val2, double singlePointColor_val3, long matchesMask_mat_nativeObj, int flags);
private static native void drawMatchesKnn_1(long img1_nativeObj, long keypoints1_mat_nativeObj, long img2_nativeObj, long keypoints2_mat_nativeObj, long matches1to2_mat_nativeObj, long outImg_nativeObj);
}
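The Features2d helpers above are deleted together with the rest of the OpenCV bindings in this commit. For reference, a minimal sketch of how the single-vector drawMatches overload (declared via drawMatches_1 above) was typically driven; ORB, the brute-force matcher, and the file names are illustrative assumptions, not code from this project:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.BFMatcher;
import org.opencv.features2d.Features2d;
import org.opencv.features2d.ORB;
import org.opencv.imgcodecs.Imgcodecs;

public class DrawMatchesSketch {
    public static void main(String[] args) {
        // Load the native OpenCV library before touching any Mat-based API.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical inputs; any two overlapping photos will do.
        Mat img1 = Imgcodecs.imread("scene_a.jpg", Imgcodecs.IMREAD_GRAYSCALE);
        Mat img2 = Imgcodecs.imread("scene_b.jpg", Imgcodecs.IMREAD_GRAYSCALE);

        // Detect keypoints and compute binary descriptors with ORB.
        ORB orb = ORB.create();
        MatOfKeyPoint kp1 = new MatOfKeyPoint();
        MatOfKeyPoint kp2 = new MatOfKeyPoint();
        Mat desc1 = new Mat();
        Mat desc2 = new Mat();
        orb.detectAndCompute(img1, new Mat(), kp1, desc1);
        orb.detectAndCompute(img2, new Mat(), kp2, desc2);

        // Brute-force Hamming matcher with cross-check pairs descriptors one-to-one.
        BFMatcher matcher = BFMatcher.create(Core.NORM_HAMMING, true);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(desc1, desc2, matches);

        // The overload removed in this diff: render the matches side by side.
        Mat outImg = new Mat();
        Features2d.drawMatches(img1, kp1, img2, kp2, matches, outImg);
        Imgcodecs.imwrite("matches.png", outImg);
    }
}

The drawMatchesKnn wrappers above take a List<MatOfDMatch> instead of a single MatOfDMatch, which is the shape produced by knnMatch in the FLANN sketch further down.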

@@ -1,61 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
// C++: class FlannBasedMatcher
//javadoc: FlannBasedMatcher
public class FlannBasedMatcher extends DescriptorMatcher {
protected FlannBasedMatcher(long addr) { super(addr); }
//
// C++: FlannBasedMatcher(Ptr_flann_IndexParams indexParams = makePtr<flann::KDTreeIndexParams>(), Ptr_flann_SearchParams searchParams = makePtr<flann::SearchParams>())
//
//javadoc: FlannBasedMatcher::FlannBasedMatcher()
public FlannBasedMatcher()
{
super( FlannBasedMatcher_0() );
return;
}
//
// C++: static Ptr_FlannBasedMatcher create()
//
//javadoc: FlannBasedMatcher::create()
public static FlannBasedMatcher create()
{
FlannBasedMatcher retVal = new FlannBasedMatcher(create_0());
return retVal;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: FlannBasedMatcher(Ptr_flann_IndexParams indexParams = makePtr<flann::KDTreeIndexParams>(), Ptr_flann_SearchParams searchParams = makePtr<flann::SearchParams>())
private static native long FlannBasedMatcher_0();
// C++: static Ptr_FlannBasedMatcher create()
private static native long create_0();
// native support for java finalize()
private static native void delete(long nativeObj);
}
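FlannBasedMatcher is likewise removed. A minimal sketch of the create()/knnMatch() pattern it supported, paired with Lowe's ratio test; the helper name, the 0.7 ratio threshold, and the CV_32F conversion are assumptions for illustration (FLANN's default KD-tree index expects float descriptors, so binary descriptors are converted here as a common workaround):

import java.util.ArrayList;
import java.util.List;

import org.opencv.core.CvType;
import org.opencv.core.DMatch;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.features2d.FlannBasedMatcher;

public class FlannMatchSketch {
    // Assumes the OpenCV native library has already been loaded and that
    // desc1/desc2 are descriptor Mats computed elsewhere (e.g. by ORB).
    static List<DMatch> goodMatches(Mat desc1, Mat desc2) {
        Mat d1 = new Mat(), d2 = new Mat();
        desc1.convertTo(d1, CvType.CV_32F);
        desc2.convertTo(d2, CvType.CV_32F);

        FlannBasedMatcher matcher = FlannBasedMatcher.create();
        List<MatOfDMatch> knn = new ArrayList<>();
        matcher.knnMatch(d1, d2, knn, 2); // two nearest neighbours per query descriptor

        // Lowe's ratio test: keep a match only if it clearly beats the runner-up.
        List<DMatch> good = new ArrayList<>();
        for (MatOfDMatch pair : knn) {
            DMatch[] m = pair.toArray();
            if (m.length >= 2 && m[0].distance < 0.7f * m[1].distance) {
                good.add(m[0]);
            }
        }
        return good;
    }
}

The intermediate List<MatOfDMatch> produced by knnMatch is exactly what the removed drawMatchesKnn wrapper consumed.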

@@ -1,302 +0,0 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.features2d;
import java.lang.String;
// C++: class GFTTDetector
//javadoc: GFTTDetector
public class GFTTDetector extends Feature2D {
protected GFTTDetector(long addr) { super(addr); }
//
// C++: static Ptr_GFTTDetector create(int maxCorners, double qualityLevel, double minDistance, int blockSize, int gradiantSize, bool useHarrisDetector = false, double k = 0.04)
//
//javadoc: GFTTDetector::create(maxCorners, qualityLevel, minDistance, blockSize, gradiantSize, useHarrisDetector, k)
public static GFTTDetector create(int maxCorners, double qualityLevel, double minDistance, int blockSize, int gradiantSize, boolean useHarrisDetector, double k)
{
GFTTDetector retVal = new GFTTDetector(create_0(maxCorners, qualityLevel, minDistance, blockSize, gradiantSize, useHarrisDetector, k));
return retVal;
}
//javadoc: GFTTDetector::create(maxCorners, qualityLevel, minDistance, blockSize, gradiantSize)
public static GFTTDetector create(int maxCorners, double qualityLevel, double minDistance, int blockSize, int gradiantSize)
{
GFTTDetector retVal = new GFTTDetector(create_1(maxCorners, qualityLevel, minDistance, blockSize, gradiantSize));
return retVal;
}
//
// C++: static Ptr_GFTTDetector create(int maxCorners = 1000, double qualityLevel = 0.01, double minDistance = 1, int blockSize = 3, bool useHarrisDetector = false, double k = 0.04)
//
//javadoc: GFTTDetector::create(maxCorners, qualityLevel, minDistance, blockSize, useHarrisDetector, k)
public static GFTTDetector create(int maxCorners, double qualityLevel, double minDistance, int blockSize, boolean useHarrisDetector, double k)
{
GFTTDetector retVal = new GFTTDetector(create_2(maxCorners, qualityLevel, minDistance, blockSize, useHarrisDetector, k));
return retVal;
}
//javadoc: GFTTDetector::create()
public static GFTTDetector create()
{
GFTTDetector retVal = new GFTTDetector(create_3());
return retVal;
}
//
// C++: String getDefaultName()
//
//javadoc: GFTTDetector::getDefaultName()
public String getDefaultName()
{
String retVal = getDefaultName_0(nativeObj);
return retVal;
}
//
// C++: bool getHarrisDetector()
//
//javadoc: GFTTDetector::getHarrisDetector()
public boolean getHarrisDetector()
{
boolean retVal = getHarrisDetector_0(nativeObj);
return retVal;
}
//
// C++: double getK()
//
//javadoc: GFTTDetector::getK()
public double getK()
{
double retVal = getK_0(nativeObj);
return retVal;
}
//
// C++: double getMinDistance()
//
//javadoc: GFTTDetector::getMinDistance()
public double getMinDistance()
{
double retVal = getMinDistance_0(nativeObj);
return retVal;
}
//
// C++: double getQualityLevel()
//
//javadoc: GFTTDetector::getQualityLevel()
public double getQualityLevel()
{
double retVal = getQualityLevel_0(nativeObj);
return retVal;
}
//
// C++: int getBlockSize()
//
//javadoc: GFTTDetector::getBlockSize()
public int getBlockSize()
{
int retVal = getBlockSize_0(nativeObj);
return retVal;
}
//
// C++: int getMaxFeatures()
//
//javadoc: GFTTDetector::getMaxFeatures()
public int getMaxFeatures()
{
int retVal = getMaxFeatures_0(nativeObj);
return retVal;
}
//
// C++: void setBlockSize(int blockSize)
//
//javadoc: GFTTDetector::setBlockSize(blockSize)
public void setBlockSize(int blockSize)
{
setBlockSize_0(nativeObj, blockSize);
return;
}
//
// C++: void setHarrisDetector(bool val)
//
//javadoc: GFTTDetector::setHarrisDetector(val)
public void setHarrisDetector(boolean val)
{
setHarrisDetector_0(nativeObj, val);
return;
}
//
// C++: void setK(double k)
//
//javadoc: GFTTDetector::setK(k)
public void setK(double k)
{
setK_0(nativeObj, k);
return;
}
//
// C++: void setMaxFeatures(int maxFeatures)
//
//javadoc: GFTTDetector::setMaxFeatures(maxFeatures)
public void setMaxFeatures(int maxFeatures)
{
setMaxFeatures_0(nativeObj, maxFeatures);
return;
}
//
// C++: void setMinDistance(double minDistance)
//
//javadoc: GFTTDetector::setMinDistance(minDistance)
public void setMinDistance(double minDistance)
{
setMinDistance_0(nativeObj, minDistance);
return;
}
//
// C++: void setQualityLevel(double qlevel)
//
//javadoc: GFTTDetector::setQualityLevel(qlevel)
public void setQualityLevel(double qlevel)
{
setQualityLevel_0(nativeObj, qlevel);
return;
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: static Ptr_GFTTDetector create(int maxCorners, double qualityLevel, double minDistance, int blockSize, int gradiantSize, bool useHarrisDetector = false, double k = 0.04)
private static native long create_0(int maxCorners, double qualityLevel, double minDistance, int blockSize, int gradiantSize, boolean useHarrisDetector, double k);
private static native long create_1(int maxCorners, double qualityLevel, double minDistance, int blockSize, int gradiantSize);
// C++: static Ptr_GFTTDetector create(int maxCorners = 1000, double qualityLevel = 0.01, double minDistance = 1, int blockSize = 3, bool useHarrisDetector = false, double k = 0.04)
private static native long create_2(int maxCorners, double qualityLevel, double minDistance, int blockSize, boolean useHarrisDetector, double k);
private static native long create_3();
// C++: String getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// C++: bool getHarrisDetector()
private static native boolean getHarrisDetector_0(long nativeObj);
// C++: double getK()
private static native double getK_0(long nativeObj);
// C++: double getMinDistance()
private static native double getMinDistance_0(long nativeObj);
// C++: double getQualityLevel()
private static native double getQualityLevel_0(long nativeObj);
// C++: int getBlockSize()
private static native int getBlockSize_0(long nativeObj);
// C++: int getMaxFeatures()
private static native int getMaxFeatures_0(long nativeObj);
// C++: void setBlockSize(int blockSize)
private static native void setBlockSize_0(long nativeObj, int blockSize);
// C++: void setHarrisDetector(bool val)
private static native void setHarrisDetector_0(long nativeObj, boolean val);
// C++: void setK(double k)
private static native void setK_0(long nativeObj, double k);
// C++: void setMaxFeatures(int maxFeatures)
private static native void setMaxFeatures_0(long nativeObj, int maxFeatures);
// C++: void setMinDistance(double minDistance)
private static native void setMinDistance_0(long nativeObj, double minDistance);
// C++: void setQualityLevel(double qlevel)
private static native void setQualityLevel_0(long nativeObj, double qlevel);
// native support for java finalize()
private static native void delete(long nativeObj);
}
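GFTTDetector (good-features-to-track exposed as a Feature2D) is deleted as well. A minimal sketch of the create()/setter/detect() flow its API supported; the input file name and the parameter values are illustrative assumptions:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.features2d.GFTTDetector;
import org.opencv.imgcodecs.Imgcodecs;

public class GfttSketch {
    public static void main(String[] args) {
        // Load the native OpenCV library before using the detector.
        System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

        // Hypothetical input; a single-channel image keeps the corner detector happy.
        Mat gray = Imgcodecs.imread("frame.png", Imgcodecs.IMREAD_GRAYSCALE);

        // Ask for at most 500 corners, tuning the same knobs as the setters removed above.
        GFTTDetector gftt = GFTTDetector.create();
        gftt.setMaxFeatures(500);
        gftt.setQualityLevel(0.01);
        gftt.setMinDistance(10);

        MatOfKeyPoint corners = new MatOfKeyPoint();
        gftt.detect(gray, corners);
        System.out.println("GFTT corners found: " + corners.total());
    }
}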

Some files were not shown because too many files have changed in this diff.
