Merge 2.4.3-rc

commit 6211f156e6
Author: Andrey Kamaev
Date:   2012-10-24 19:56:27 +04:00

93 changed files with 2844 additions and 4117 deletions


@ -95,6 +95,8 @@ ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4244) # vs2008
ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4267 /wd4305 /wd4306) # vs2008 Win64
ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4703) # vs2012
ocv_warnings_disable(CMAKE_C_FLAGS /wd4267 /wd4244 /wd4018)
if(UNIX AND (CMAKE_COMPILER_IS_GNUCXX OR CV_ICC))
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -fPIC")
endif()


@ -8,7 +8,7 @@ LOCAL_LOG_PATH = os.path.join(os.getcwd(), "logs")
if (__name__ == "__main__"):
if (not os.path.exists(LOCAL_LOG_PATH)):
os.makedirs(LOCAL_LOG_PATH)
print("Building native part of OpenCV Manager...")
HomeDir = os.getcwd()
@ -19,25 +19,25 @@ if (__name__ == "__main__"):
#print(BuildCommand)
res = os.system(BuildCommand)
if (0 == res):
print("Build\t[OK]")
else:
print("Build\t[FAILED]")
sys.exit(-1)
os.chdir(HomeDir)
ConfFile = open("device.conf", "rt")
for s in ConfFile.readlines():
keys = s.split(";")
if (len(keys) < 2):
print("Error: invalid config line: \"%s\"" % s)
continue
Arch = keys[0]
Name = keys[1]
print("testing \"%s\" arch" % Arch)
print("Pushing to device \"%s\"" % Name)
PushCommand = "%s \"%s\" \"%s\" 2>&1" % (os.path.join(HomeDir, "push_native.py"), Arch, Name)
os.system(PushCommand)
print("Testing on device \"%s\"" % Name)
TestCommand = "%s \"%s\" \"%s\" 2>&1" % (os.path.join(HomeDir, "test_native.py"), Arch, Name)
os.system(TestCommand)
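For reference, each line of device.conf pairs a target architecture (keys[0]) with the device name passed to adb's -s option (keys[1]), separated by ";". A hypothetical example; both values below are illustrative only:

    armeabi-v7a;0123456789ABCDEF
    x86;emulator-5554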


@ -46,7 +46,7 @@ There is a very basic code snippet implementing the async initialization with BaseLoaderCallback
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
-if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
+if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
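For readers new to the loader, the snippet above is only the onResume() fragment; a minimal, self-contained sketch of the whole pattern follows, using the published 2.4.x Java API (the activity class name and log tag are hypothetical):

    import org.opencv.android.BaseLoaderCallback;
    import org.opencv.android.LoaderCallbackInterface;
    import org.opencv.android.OpenCVLoader;

    import android.app.Activity;
    import android.util.Log;

    public class SampleActivity extends Activity {
        private static final String TAG = "SampleActivity"; // hypothetical name

        // Receives the result of the asynchronous connection to OpenCV Manager.
        private final BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                if (status == LoaderCallbackInterface.SUCCESS) {
                    Log.i(TAG, "OpenCV loaded successfully");
                    // Safe to use OpenCV classes from here on.
                } else {
                    super.onManagerConnected(status);
                }
            }
        };

        @Override
        public void onResume() {
            super.onResume();
            Log.i(TAG, "Trying to load OpenCV library");
            if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mOpenCVCallBack)) {
                Log.e(TAG, "Cannot connect to OpenCV Manager");
            }
        }
    }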


@ -40,6 +40,10 @@ OpenCV version constants
OpenCV Library version 2.4.2
.. data:: OPENCV_VERSION_2_4_3
OpenCV Library version 2.4.3
Other constants
----------------


@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.engine"
-android:versionCode="17"
-android:versionName="1.7" >
+android:versionCode="18"
+android:versionName="1.8" >
<uses-sdk android:minSdkVersion="8" />
<uses-feature android:name="android.hardware.touchscreen" android:required="false"/>
@ -12,9 +12,9 @@
android:label="@string/app_name" >
<service android:exported="true" android:name="OpenCVEngineService" android:process=":OpenCVEngineProcess">
<intent-filter>
<action android:name="org.opencv.engine.BIND"></action>
</intent-filter>
</service>
<activity
@ -22,7 +22,7 @@
android:label="@string/app_name"
android:screenOrientation="portrait">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>


@ -21,53 +21,51 @@ status_t BnOpenCVEngine::onTransact(uint32_t code, const Parcel& data, android::
switch(code)
{
case OCVE_GET_ENGINE_VERSION:
{
LOGD("OpenCVEngine OCVE_GET_ENGINE_VERSION request");
CHECK_INTERFACE(IOpenCVEngine, data, reply);
LOGD("OpenCVEngine::GetVersion()");
reply->writeInt32(0);
return reply->writeInt32(GetVersion());
} break;
case OCVE_GET_LIB_PATH_BY_VERSION:
{
LOGD("OpenCVEngine OCVE_GET_LIB_PATH_BY_VERSION request");
CHECK_INTERFACE(IOpenCVEngine, data, reply);
const String16 version = data.readString16();
LOGD("OpenCVEngine::GetLibPathByVersion(%s)", String8(version).string());
String16 path = GetLibPathByVersion(version);
reply->writeInt32(0);
return reply->writeString16(path);
} break;
case OCVE_GET_LIB_LIST:
{
LOGD("OpenCVEngine OCVE_GET_LIB_LIST request");
CHECK_INTERFACE(IOpenCVEngine, data, reply);
const String16 version = data.readString16();
LOGD("OpenCVEngine::GetLibraryList(%s)", String8(version).string());
String16 path = GetLibraryList(version);
reply->writeInt32(0);
return reply->writeString16(path);
} break;
case OCVE_INSTALL_VERSION:
{
LOGD("OpenCVEngine OCVE_INSTALL_VERSION request");
CHECK_INTERFACE(IOpenCVEngine, data, reply);
const String16 version = data.readString16();
LOGD("OpenCVEngine::InstallVersion(%s)", String8(version).string());
bool result = InstallVersion(version);
reply->writeInt32(0);
int res = reply->writeInt32(static_cast<int32_t>(result));
LOGD("InstallVersion call to Binder finished with res %d", res);
return res;
} break;
default:
{
LOGD("OpenCVEngine unknown request");
return BBinder::onTransact(code, data, reply, flags);
}
}
return android::NO_ERROR;
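Note the reply convention visible above: every successful branch first writes a status int32 of 0, then the payload. A hedged client-side sketch of unwrapping such a reply over raw Binder; the interface descriptor string and the assumption that OCVE_GET_ENGINE_VERSION equals IBinder.FIRST_CALL_TRANSACTION are illustrative guesses, since IOpenCVEngine's constants are not part of this diff:

    import android.os.IBinder;
    import android.os.Parcel;
    import android.os.RemoteException;

    // Hedged sketch: a raw Binder call mirroring the reply layout above.
    static int getEngineVersion(IBinder engine) throws RemoteException {
        Parcel data = Parcel.obtain();
        Parcel reply = Parcel.obtain();
        try {
            data.writeInterfaceToken("org.opencv.engine.IOpenCVEngine"); // assumed descriptor
            engine.transact(IBinder.FIRST_CALL_TRANSACTION /* assumed OCVE_GET_ENGINE_VERSION */,
                    data, reply, 0);
            reply.readInt();        // leading status written by reply->writeInt32(0)
            return reply.readInt(); // payload written by reply->writeInt32(GetVersion())
        } finally {
            data.recycle();
            reply.recycle();
        }
    }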


@ -13,97 +13,97 @@ int GetCpuID()
map<string, string> cpu_info = GetCpuInfo();
map<string, string>::const_iterator it;
#if defined(__i386__)
LOGD("Using X86 HW detector");
result |= ARCH_X86;
it = cpu_info.find("flags");
if (cpu_info.end() != it)
{
set<string> features = SplitString(it->second, ' ');
if (features.end() != features.find(CPU_INFO_SSE_STR))
{
result |= FEATURES_HAS_SSE;
}
if (features.end() != features.find(CPU_INFO_SSE2_STR))
{
result |= FEATURES_HAS_SSE2;
}
if (features.end() != features.find(CPU_INFO_SSSE3_STR))
{
result |= FEATURES_HAS_SSSE3;
}
}
#elif defined(__mips)
#ifdef __SUPPORT_MIPS
result |= ARCH_MIPS;
#else
result = ARCH_UNKNOWN;
#endif
#else
LOGD("Using ARM HW detector");
it = cpu_info.find("Processor");
if (cpu_info.end() != it)
{
size_t proc_name_pos = it->second.find(CPU_INFO_ARCH_X86_STR);
if (string::npos != proc_name_pos)
{
}
else
{
proc_name_pos = it->second.find(CPU_INFO_ARCH_ARMV7_STR);
if (string::npos != proc_name_pos)
{
result |= ARCH_ARMv7;
}
else
{
proc_name_pos = it->second.find(CPU_INFO_ARCH_ARMV6_STR);
if (string::npos != proc_name_pos)
{
result |= ARCH_ARMv6;
}
else
{
proc_name_pos = it->second.find(CPU_INFO_ARCH_ARMV5_STR);
if (string::npos != proc_name_pos)
{
result |= ARCH_ARMv5;
}
}
}
}
}
else
{
return ARCH_UNKNOWN;
}
it = cpu_info.find("Features");
if (cpu_info.end() != it)
{
set<string> features = SplitString(it->second, ' ');
if (features.end() != features.find(CPU_INFO_NEON_STR))
{
result |= FEATURES_HAS_NEON;
}
if (features.end() != features.find(CPU_INFO_NEON2_STR))
{
result |= FEATURES_HAS_NEON2;
}
if (features.end() != features.find(CPU_INFO_VFPV3_STR))
{
if (features.end() != features.find(CPU_INFO_VFPV3D16_STR))
{
result |= FEATURES_HAS_VFPv3d16;
}
else
{
result |= FEATURES_HAS_VFPv3;
}
}
}
#endif
return result;
}
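For orientation, GetCpuID() keys off the "flags" line of /proc/cpuinfo on x86 and the "Processor" and "Features" lines on ARM. A representative excerpt from a hypothetical ARMv7 device (exact values vary by kernel):

    Processor : ARMv7 Processor rev 0 (v7l)
    Features  : swp half thumb fastmult vfp edsp neon vfpv3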
@ -116,7 +116,7 @@ string GetPlatformName()
if (cpu_info.end() != hw_iterator)
{
hardware_name = hw_iterator->second;
}
return hardware_name;
@ -126,37 +126,37 @@ int GetProcessorCount()
{
FILE* cpuPossible = fopen("/sys/devices/system/cpu/possible", "r");
if(!cpuPossible)
return 1;
char buf[2000]; //big enough for 1000 CPUs in worst possible configuration
char* pbuf = fgets(buf, sizeof(buf), cpuPossible);
fclose(cpuPossible);
if(!pbuf)
return 1;
//parse string of form "0-1,3,5-7,10,13-15"
int cpusAvailable = 0;
while(*pbuf)
{
const char* pos = pbuf;
bool range = false;
while(*pbuf && *pbuf != ',')
{
if(*pbuf == '-') range = true;
++pbuf;
}
if(*pbuf) *pbuf++ = 0;
if(!range)
++cpusAvailable;
else
{
int rstart = 0, rend = 0;
sscanf(pos, "%d-%d", &rstart, &rend);
cpusAvailable += rend - rstart + 1;
}
}
return cpusAvailable ? cpusAvailable : 1;
}
int DetectKnownPlatforms()
@ -165,20 +165,20 @@ int DetectKnownPlatforms()
if (3 == tegra_status)
{
return PLATFORM_TEGRA3;
}
else
{
return PLATFORM_UNKNOWN;
}
// NOTE: Uncomment when all Tegras will be supported
/*if (tegra_status > 0)
{
return PLATFORM_TEGRA + tegra_status - 1;
}
else
{
return PLATFORM_UNKNOWN;
}*/
}


@ -23,6 +23,7 @@ std::set<std::string> OpenCVEngine::InitKnownOpenCVersions()
result.insert("240");
result.insert("241");
result.insert("242");
result.insert("243");
return result;
}
@ -41,31 +42,31 @@ std::string OpenCVEngine::NormalizeVersionString(std::string version)
if (version.empty())
{
return result;
}
if (('a' == version[version.size()-1]) || ('b' == version[version.size()-1]))
{
suffix = version[version.size()-1];
version.erase(version.size()-1);
}
std::vector<std::string> parts = SplitStringVector(version, '.');
if (parts.size() >= 2)
{
if (parts.size() >= 3)
{
result = parts[0] + parts[1] + parts[2] + suffix;
if (!ValidateVersionString(result))
result = "";
}
else
{
result = parts[0] + parts[1] + "0" + suffix;
if (!ValidateVersionString(result))
result = "";
}
}
return result;
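NormalizeVersionString() collapses a dotted version into the three-digit form used for package lookup: "2.4.3" becomes "243", "2.4" becomes "240", and a trailing 'a'/'b' suffix is preserved ("2.4.3a" becomes "243a"). A rough Java re-expression of the same rule, for illustration only (the ValidateVersionString() check from the C++ above is omitted):

    // Illustrative sketch mirroring NormalizeVersionString() above; not part of the commit.
    static String normalizeVersion(String version) {
        if (version.isEmpty()) return "";
        String suffix = "";
        char last = version.charAt(version.length() - 1);
        if (last == 'a' || last == 'b') {
            suffix = String.valueOf(last);
            version = version.substring(0, version.length() - 1);
        }
        String[] parts = version.split("\\.");
        if (parts.length >= 3) return parts[0] + parts[1] + parts[2] + suffix; // "2.4.3" -> "243"
        if (parts.length == 2) return parts[0] + parts[1] + "0" + suffix;      // "2.4"   -> "240"
        return ""; // anything shorter is not a supported version string
    }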
@ -94,19 +95,19 @@ String16 OpenCVEngine::GetLibPathByVersion(android::String16 version)
if (!norm_version.empty())
{
path = PackageManager->GetPackagePathByVersion(norm_version, Platform, CpuID);
if (path.empty())
{
LOGI("Package OpenCV of version %s is not installed. Try to install it :)", norm_version.c_str());
}
else
{
FixPermissions(path);
}
}
else
{
LOGE("OpenCV version \"%s\" (%s) is not supported", String8(version).string(), norm_version.c_str());
}
return String16(path.c_str());
@ -121,46 +122,46 @@ android::String16 OpenCVEngine::GetLibraryList(android::String16 version)
if (!norm_version.empty())
{
std::string tmp = PackageManager->GetPackagePathByVersion(norm_version, Platform, CpuID);
if (!tmp.empty())
{
tmp += (std::string("/") + LIB_OPENCV_INFO_NAME);
LOGD("Trying to load info library \"%s\"", tmp.c_str());
void* handle;
char* (*info_func)();
handle = dlopen(tmp.c_str(), RTLD_LAZY);
if (handle)
{
const char* error;
dlerror();
*(void **) (&info_func) = dlsym(handle, "GetLibraryList");
if ((error = dlerror()) == NULL)
{
result = String16((*info_func)());
dlclose(handle);
}
else
{
LOGE("Library loading error: \"%s\"", error);
}
}
else
{
LOGI("Info library not found in package");
}
}
else
{
LOGI("Package OpenCV of version %s is not installed. Try to install it :)", norm_version.c_str());
}
}
else
{
LOGE("OpenCV version \"%s\" is not supported", norm_version.c_str());
}
return result;
@ -172,26 +173,18 @@ bool OpenCVEngine::InstallVersion(android::String16 version)
std::string norm_version;
bool result = false;
LOGD("OpenCVEngine::InstallVersion() begin");
norm_version = NormalizeVersionString(std_version);
if (!norm_version.empty())
{
if (!PackageManager->CheckVersionInstalled(norm_version, Platform, CpuID))
{
LOGD("PackageManager->InstallVersion call");
result = PackageManager->InstallVersion(norm_version, Platform, CpuID);
}
else
{
LOGI("Package OpenCV of version %s is already installed. Skipped.", norm_version.c_str());
result = true;
}
}
else
{
LOGE("OpenCV version \"%s\" is not supported", norm_version.c_str());
}
LOGD("OpenCVEngine::InstallVersion() end");
@ -207,16 +200,16 @@ bool OpenCVEngine::FixPermissions(const std::string& path)
DIR* dir = opendir(path.c_str());
if (!dir)
{
LOGD("Fixing permissions error");
return false;
}
dirent* files = readdir(dir);
while (files)
{
LOGD("Fix permissions for \"%s\"", files->d_name);
chmod((path + std::string("/") + std::string(files->d_name)).c_str(), S_IRUSR | S_IWUSR | S_IXUSR | S_IRGRP | S_IXGRP | S_IROTH | S_IXOTH);
files = readdir(dir);
}
closedir(dir);


@ -12,18 +12,18 @@ map<string, string> GetCpuInfo()
f.open("/proc/cpuinfo");
if (f.is_open())
{
while (!f.eof())
{
string tmp;
string key;
string value;
getline(f, tmp);
if (ParseString(tmp, key, value))
{
result[key] = value;
}
}
}
f.close();


@ -8,7 +8,7 @@ bool StripString(string& src)
if (src.empty())
{
return false;
}
while ((pos < src.length()) && (' ' == src[pos])) pos++;
@ -32,7 +32,7 @@ bool StripString(string& src)
bool ParseString(const string& src, string& key, string& value)
{
if (src.empty())
return false;
// find seporator ":"
size_t seporator_pos = src.find(":");
@ -52,20 +52,19 @@ bool ParseString(const string& src, string& key, string& value)
set<string> SplitString(const string& src, const char seporator)
{
set<string> result;
if (!src.empty())
{
size_t seporator_pos;
size_t prev_pos = 0;
do
{
seporator_pos = src.find(seporator, prev_pos);
result.insert(src.substr(prev_pos, seporator_pos - prev_pos));
prev_pos = seporator_pos + 1;
}
while (string::npos != seporator_pos);
}
return result;
@ -77,16 +76,16 @@ vector<string> SplitStringVector(const string& src, const char seporator)
if (!src.empty())
{
size_t seporator_pos;
size_t prev_pos = 0;
do
{
seporator_pos = src.find(seporator, prev_pos);
string tmp = src.substr(prev_pos, seporator_pos - prev_pos);
result.push_back(tmp);
prev_pos = seporator_pos + 1;
}
while (string::npos != seporator_pos);
}
return result;


@ -15,38 +15,38 @@ int DetectTegra()
gzFile kernelConfig = gzopen(KERNEL_CONFIG, "r");
if (kernelConfig != 0)
{
char tmpbuf[KERNEL_CONFIG_MAX_LINE_WIDTH];
const char *tegra_config = KERNEL_CONFIG_TEGRA_MAGIC;
const char *tegra2_config = KERNEL_CONFIG_TEGRA2_MAGIC;
const char *tegra3_config = KERNEL_CONFIG_TEGRA3_MAGIC;
int len = strlen(tegra_config);
int len2 = strlen(tegra2_config);
int len3 = strlen(tegra3_config);
while (0 != gzgets(kernelConfig, tmpbuf, KERNEL_CONFIG_MAX_LINE_WIDTH))
{
if (0 == strncmp(tmpbuf, tegra_config, len))
{
result = 1;
}
if (0 == strncmp(tmpbuf, tegra2_config, len2))
{
result = 2;
break;
}
if (0 == strncmp(tmpbuf, tegra3_config, len3))
{
result = 3;
break;
}
}
gzclose(kernelConfig);
}
else
{
result = TEGRA_DETECTOR_ERROR;
}
return result;


@ -8,8 +8,8 @@
using namespace std;
JavaBasedPackageManager::JavaBasedPackageManager(JavaVM* JavaMashine, jobject MarketConnector):
JavaContext(JavaMashine),
JavaPackageManager(MarketConnector)
{
assert(JavaContext);
assert(JavaPackageManager);
@ -24,23 +24,23 @@ bool JavaBasedPackageManager::InstallPackage(const PackageInfo& package)
self_attached = (JNI_EDETACHED == JavaContext->GetEnv((void**)&jenv, JNI_VERSION_1_6));
if (self_attached)
{
JavaContext->AttachCurrentThread(&jenv, NULL);
}
LOGD("GetObjectClass call\n");
jclass jclazz = jenv->GetObjectClass(JavaPackageManager);
if (!jclazz)
{
LOGE("MarketConnector class was not found!");
return false;
}
LOGD("GetMethodID call\n");
jmethodID jmethod = jenv->GetMethodID(jclazz, "InstallAppFromMarket", "(Ljava/lang/String;)Z");
if (!jmethod)
{
LOGE("MarketConnector::GetAppFormMarket method was not found!");
return false;
}
LOGD("Calling java package manager with package name %s\n", package.GetFullName().c_str());
@ -50,7 +50,7 @@ bool JavaBasedPackageManager::InstallPackage(const PackageInfo& package)
if (self_attached)
{
JavaContext->DetachCurrentThread();
}
LOGD("JavaBasedPackageManager::InstallPackage() end\n");
@ -69,23 +69,23 @@ vector<PackageInfo> JavaBasedPackageManager::GetInstalledPackages()
self_attached = (JNI_EDETACHED == JavaContext->GetEnv((void**)&jenv, JNI_VERSION_1_6));
if (self_attached)
{
JavaContext->AttachCurrentThread(&jenv, NULL);
}
LOGD("GetObjectClass call");
jclass jclazz = jenv->GetObjectClass(JavaPackageManager);
if (!jclazz)
{
LOGE("MarketConnector class was not found!");
return result;
}
LOGD("GetMethodID call");
jmethodID jmethod = jenv->GetMethodID(jclazz, "GetInstalledOpenCVPackages", "()[Landroid/content/pm/PackageInfo;");
if (!jmethod)
{
LOGE("MarketConnector::GetInstalledOpenCVPackages method was not found!");
return result;
}
LOGD("Java package manager call");
@ -98,19 +98,19 @@ vector<PackageInfo> JavaBasedPackageManager::GetInstalledPackages()
for (jsize i = 0; i < size; i++)
{
jobject jtmp = jenv->GetObjectArrayElement(jpkgs, i);
PackageInfo tmp = ConvertPackageFromJava(jtmp, jenv);
jenv->DeleteLocalRef(jtmp);
if (tmp.IsValid())
result.push_back(tmp);
}
jenv->DeleteLocalRef(jpkgs);
if (self_attached)
{
JavaContext->DetachCurrentThread();
}
LOGD("JavaBasedPackageManager::GetInstalledPackages() end");
@ -140,20 +140,20 @@ PackageInfo JavaBasedPackageManager::ConvertPackageFromJava(jobject package, JNI
jint api_level = jenv->GetStaticIntField(jclazz, jfield);
if (api_level > 8)
{
jclazz = jenv->GetObjectClass(package);
jfield = jenv->GetFieldID(jclazz, "applicationInfo", "Landroid/content/pm/ApplicationInfo;");
jobject japp_info = jenv->GetObjectField(package, jfield);
jclazz = jenv->GetObjectClass(japp_info);
jfield = jenv->GetFieldID(jclazz, "nativeLibraryDir", "Ljava/lang/String;");
jstring jpathobj = static_cast<jstring>(jenv->GetObjectField(japp_info, jfield));
const char* jpathstr = jenv->GetStringUTFChars(jpathobj, NULL);
path = string(jpathstr);
jenv->ReleaseStringUTFChars(jpathobj, jpathstr);
jenv->DeleteLocalRef(jpathobj);
}
else
{
path = "/data/data/" + name + "/lib";
}
return PackageInfo(name, path, verison);
@ -170,14 +170,14 @@ JavaBasedPackageManager::~JavaBasedPackageManager()
self_attached = (JNI_EDETACHED == JavaContext->GetEnv((void**)&jenv, JNI_VERSION_1_6));
if (self_attached)
{
JavaContext->AttachCurrentThread(&jenv, NULL);
}
jenv->DeleteGlobalRef(JavaPackageManager);
if (self_attached)
{
JavaContext->DetachCurrentThread();
}
LOGD("JavaBasedPackageManager::~JavaBasedPackageManager() end");
}


@ -20,11 +20,11 @@ JNIEXPORT jobject JNICALL Java_org_opencv_engine_BinderConnector_Connect(JNIEnv*
LOGI("Creating new component");
if (NULL != OpenCVEngineBinder.get())
{
LOGI("New component created successfully");
}
else
{
LOGE("OpenCV Engine component was not created!");
}
return javaObjectForIBinder(env, OpenCVEngineBinder);
@ -36,25 +36,25 @@ JNIEXPORT jboolean JNICALL Java_org_opencv_engine_BinderConnector_Init(JNIEnv* e
if (NULL == PackageManager)
{
JavaVM* jvm;
env->GetJavaVM(&jvm);
PackageManager = new JavaBasedPackageManager(jvm, env->NewGlobalRef(market));
}
if (PackageManager)
{
if (!OpenCVEngineBinder.get())
{
OpenCVEngineBinder = new OpenCVEngine(PackageManager);
return (NULL != OpenCVEngineBinder.get());
}
else
{
return true;
}
}
else
{
return false;
}
}
@ -67,4 +67,3 @@ JNIEXPORT void JNICALL Java_org_opencv_engine_BinderConnector_Final(JNIEnv *, jo
delete PackageManager;
PackageManager = NULL;
}


@ -25,10 +25,10 @@ int main(int argc, char *argv[])
LOGI("Trying to connect to service");
do {
EngineService = ServiceManager->getService(IOpenCVEngine::descriptor);
if (EngineService != 0) break;
LOGW("OpenCVEngine not published, waiting...");
usleep(500000); // 0.5 s
} while(true);
LOGI("Connection established");


@ -18,9 +18,9 @@ set<string> CommonPackageManager::GetInstalledVersions()
for (vector<PackageInfo>::const_iterator it = installed_packages.begin(); it != installed_packages.end(); ++it)
{
string version = it->GetVersion();
assert(!version.empty());
result.insert(version);
}
return result;
@ -36,12 +36,13 @@ bool CommonPackageManager::CheckVersionInstalled(const std::string& version, int
for (vector<PackageInfo>::const_iterator it = packages.begin(); it != packages.end(); ++it)
{
LOGD("Found package: \"%s\"", it->GetFullName().c_str());
}
if (!packages.empty())
{
-result = (packages.end() != find(packages.begin(), packages.end(), target_package));
+vector<PackageInfo>::const_iterator it = find(packages.begin(), packages.end(), target_package);
+result = (it != packages.end());
}
LOGD("CommonPackageManager::CheckVersionInstalled() end");
return result;
@ -63,65 +64,65 @@ string CommonPackageManager::GetPackagePathByVersion(const std::string& version,
for (vector<PackageInfo>::iterator it = all_packages.begin(); it != all_packages.end(); ++it)
{
LOGD("Check version \"%s\" compatibility with \"%s\"\n", version.c_str(), it->GetVersion().c_str());
if (IsVersionCompatible(version, it->GetVersion()))
{
LOGD("Compatible");
packages.push_back(*it);
}
else
{
LOGD("NOT Compatible");
}
}
if (!packages.empty())
{
vector<PackageInfo>::iterator found = find(packages.begin(), packages.end(), target_package);
if (packages.end() != found)
{
result = found->GetInstalationPath();
}
else
{
int OptRating = -1;
std::vector<std::pair<int, int> >& group = CommonPackageManager::ArmRating;
if ((cpu_id & ARCH_X86) || (cpu_id & ARCH_X64))
group = CommonPackageManager::IntelRating;
int HardwareRating = GetHardwareRating(platform, cpu_id, group);
LOGD("Current hardware platform %d, %d", platform, cpu_id);
if (-1 == HardwareRating)
{
LOGE("Cannot calculate rating for current hardware platform!");
}
else
{
for (vector<PackageInfo>::iterator it = packages.begin(); it != packages.end(); ++it)
{
int PackageRating = GetHardwareRating(it->GetPlatform(), it->GetCpuID(), group);
if (PackageRating >= 0)
{
if ((PackageRating <= HardwareRating) && (PackageRating > OptRating))
{
OptRating = PackageRating;
found = it;
}
}
}
if ((-1 != OptRating) && (packages.end() != found))
{
result = found->GetInstalationPath();
}
else
{
LOGI("Found package is incompatible with current hardware platform");
}
}
}
}
return result;


@ -32,23 +32,23 @@ inline string JoinARMFeatures(int cpu_id)
if (FEATURES_HAS_NEON2 & cpu_id)
{
if (!((ARCH_ARMv5 & cpu_id) || (ARCH_ARMv6 & cpu_id) ||(ARCH_ARMv7 & cpu_id)))
result = string(FEATURES_HAS_NEON2_NAME);
}
else if (FEATURES_HAS_NEON & cpu_id)
{
if (!((ARCH_ARMv5 & cpu_id) || (ARCH_ARMv6 & cpu_id)))
result = string(FEATURES_HAS_NEON_NAME);
}
else if (FEATURES_HAS_VFPv3 & cpu_id)
{
if ((ARCH_ARMv5 & cpu_id) || (ARCH_ARMv6 & cpu_id))
result = string(FEATURES_HAS_VFPv3_NAME);
}
else if (FEATURES_HAS_VFPv3d16 & cpu_id)
{
if ((ARCH_ARMv5 & cpu_id) || (ARCH_ARMv6 & cpu_id))
result = string(FEATURES_HAS_VFPv3d16_NAME);
}
return result;
@ -60,22 +60,22 @@ inline int SplitARMFeatures(const vector<string>& features)
for (size_t i = 3; i < features.size(); i++)
{
if (FEATURES_HAS_VFPv3_NAME == features[i])
{
result |= FEATURES_HAS_VFPv3;
}
else if (FEATURES_HAS_VFPv3d16_NAME == features[i])
{
result |= FEATURES_HAS_VFPv3d16;
}
else if (FEATURES_HAS_NEON_NAME == features[i])
{
result |= FEATURES_HAS_NEON;
}
else if (FEATURES_HAS_NEON2_NAME == features[i])
{
result |= FEATURES_HAS_NEON2;
}
}
return result;
@ -87,15 +87,15 @@ inline string JoinIntelFeatures(int cpu_id)
if (FEATURES_HAS_SSSE3 & cpu_id)
{
result = FEATURES_HAS_SSSE3_NAME;
}
else if (FEATURES_HAS_SSE2 & cpu_id)
{
result = FEATURES_HAS_SSE2_NAME;
}
else if (FEATURES_HAS_SSE & cpu_id)
{
result = FEATURES_HAS_SSE_NAME;
}
return result;
@ -107,18 +107,18 @@ inline int SplitIntelFeatures(const vector<string>& features)
for (size_t i = 3; i < features.size(); i++)
{
if (FEATURES_HAS_SSSE3_NAME == features[i])
{
result |= FEATURES_HAS_SSSE3;
}
else if (FEATURES_HAS_SSE2_NAME == features[i])
{
result |= FEATURES_HAS_SSE2;
}
else if (FEATURES_HAS_SSE_NAME == features[i])
{
result |= FEATURES_HAS_SSE;
}
}
return result;
@ -130,12 +130,12 @@ inline string SplitVersion(const vector<string>& features, const string& package
if ((features.size() > 1) && ('v' == features[1][0]))
{
result = features[1].substr(1);
result += SplitStringVector(package_version, '.')[0];
}
else
{
// TODO: Report package name format error
}
return result;
@ -158,23 +158,23 @@ inline int SplitPlatfrom(const vector<string>& features)
if (features.size() > 2)
{
string tmp = features[2];
if (PLATFORM_TEGRA_NAME == tmp)
{
result = PLATFORM_TEGRA;
}
else if (PLATFORM_TEGRA2_NAME == tmp)
{
result = PLATFORM_TEGRA2;
}
else if (PLATFORM_TEGRA3_NAME == tmp)
{
result = PLATFORM_TEGRA3;
}
}
else
{
// TODO: Report package name format error
}
return result;
@ -189,135 +189,135 @@ inline int SplitPlatfrom(const vector<string>& features)
* Example: armv7_neon, armv5_vfpv3
*/
PackageInfo::PackageInfo(const string& version, int platform, int cpu_id, std::string install_path):
Version(version),
Platform(platform),
CpuID(cpu_id),
InstallPath("")
{
#ifndef __SUPPORT_TEGRA3
Platform = PLATFORM_UNKNOWN;
#endif
FullName = BasePackageName + "_v" + Version.substr(0, Version.size()-1);
if (PLATFORM_UNKNOWN != Platform)
{
FullName += string("_") + JoinPlatform(platform);
}
else
{
if (ARCH_UNKNOWN != CpuID)
{
if (ARCH_X86 & CpuID)
{
LOGD("PackageInfo::PackageInfo: package arch x86");
FullName += string("_") + ARCH_X86_NAME;
#ifdef __SUPPORT_INTEL_FEATURES
string features = JoinIntelFeatures(CpuID);
if (!features.empty())
{
FullName += string("_") + features;
}
#endif
}
else if (ARCH_X64 & CpuID)
{
LOGD("PackageInfo::PackageInfo: package arch x64");
#ifdef __SUPPORT_INTEL_x64
FullName += string("_") + ARCH_X64_NAME;
#else
FullName += string("_") + ARCH_X86_NAME;
#endif
#ifdef __SUPPORT_INTEL_FEATURES
string features = JoinIntelFeatures(CpuID);
if (!features.empty())
{
FullName += string("_") + features;
}
#endif
}
else if (ARCH_ARMv5 & CpuID)
{
LOGD("PackageInfo::PackageInfo: package arch ARMv5");
FullName += string("_") + ARCH_ARMv5_NAME;
#ifdef __SUPPORT_ARMEABI_FEATURES
string features = JoinARMFeatures(CpuID);
if (!features.empty())
{
FullName += string("_") + features;
}
#endif
}
else if (ARCH_ARMv6 & CpuID)
{
LOGD("PackageInfo::PackageInfo: package arch ARMv6");
// NOTE: ARM v5 used instead ARM v6
//FullName += string("_") + ARCH_ARMv6_NAME;
FullName += string("_") + ARCH_ARMv5_NAME;
#ifdef __SUPPORT_ARMEABI_FEATURES
string features = JoinARMFeatures(CpuID);
if (!features.empty())
{
FullName += string("_") + features;
}
#endif
}
else if (ARCH_ARMv7 & CpuID)
{
LOGD("PackageInfo::PackageInfo: package arch ARMv7");
FullName += string("_") + ARCH_ARMv7_NAME;
#ifdef __SUPPORT_ARMEABI_V7A_FEATURES
string features = JoinARMFeatures(CpuID);
if (!features.empty())
{
FullName += string("_") + features;
}
#endif
}
else if (ARCH_ARMv8 & CpuID)
{
LOGD("PackageInfo::PackageInfo: package arch ARMv8");
#ifdef __SUPPORT_ARMEABI_V8
FullName += string("_") + ARCH_ARMv8_NAME;
#else
FullName += string("_") + ARCH_ARMv7_NAME;
#endif
//string features = JoinARMFeatures(CpuID);
//if (!features.empty())
//{
// FullName += string("_") + features;
//}
}
#ifdef __SUPPORT_MIPS
else if (ARCH_MIPS & CpuID)
{
FullName += string("_") + ARCH_MIPS_NAME;
}
#endif
else
{
LOGD("PackageInfo::PackageInfo: package arch unknown");
Version.clear();
CpuID = ARCH_UNKNOWN;
Platform = PLATFORM_UNKNOWN;
}
}
else
{
LOGD("PackageInfo::PackageInfo: package arch unknown");
Version.clear();
CpuID = ARCH_UNKNOWN;
Platform = PLATFORM_UNKNOWN;
}
}
if (!FullName.empty())
{
InstallPath = install_path + FullName + "/lib";
}
}
PackageInfo::PackageInfo(const string& fullname, const string& install_path, string package_version):
FullName(fullname),
InstallPath(install_path)
{
LOGD("PackageInfo::PackageInfo(\"%s\", \"%s\", \"%s\")", fullname.c_str(), install_path.c_str(), package_version.c_str());
@ -326,127 +326,127 @@ PackageInfo::PackageInfo(const string& fullname, const string& install_path, str
if (OPENCV_ENGINE_PACKAGE == fullname)
{
// Since version 1.7 OpenCV Manager has its own version of OpenCV inside
// Load libopencv_info.so to understand OpenCV version, platform and other features
std::string tmp;
if (install_path.empty())
{
tmp = std::string(DEFAULT_ENGINE_INSTALL_PATH) + "/" + LIB_OPENCV_INFO_NAME;
}
else
{
tmp = install_path + "/" + LIB_OPENCV_INFO_NAME;
}
LOGD("Trying to load info library \"%s\"", tmp.c_str());
void* handle;
const char* (*name_func)();
const char* (*revision_func)();
handle = dlopen(tmp.c_str(), RTLD_LAZY);
if (handle)
{
const char* error;
dlerror();
*(void **) (&name_func) = dlsym(handle, "GetPackageName");
*(void **) (&revision_func) = dlsym(handle, "GetRevision");
error = dlerror();
if (!error && revision_func && name_func)
{
FullName = std::string((*name_func)());
package_version = std::string((*revision_func)());
dlclose(handle);
LOGI("OpenCV package \"%s\" revision \"%s\" found", FullName.c_str(), package_version.c_str());
}
else
{
LOGE("Library loading error (%x, %x): \"%s\"", name_func, revision_func, error);
}
}
else
{
LOGI("Info library not found in package");
LOGI("OpenCV Manager package does not contain any version of OpenCV library");
Version.clear();
CpuID = ARCH_UNKNOWN;
Platform = PLATFORM_UNKNOWN;
return;
}
}
vector<string> features = SplitStringVector(FullName, '_');
if (!features.empty() && (BasePackageName == features[0]))
{
Version = SplitVersion(features, package_version);
if (Version.empty())
{
CpuID = ARCH_UNKNOWN;
Platform = PLATFORM_UNKNOWN;
return;
}
Platform = SplitPlatfrom(features);
if (PLATFORM_UNKNOWN != Platform)
{
CpuID = 0;
}
else
{
if (features.size() < 3)
{
LOGD("It is not OpenCV library package for this platform");
Version.clear();
CpuID = ARCH_UNKNOWN;
Platform = PLATFORM_UNKNOWN;
return;
}
else if (ARCH_ARMv5_NAME == features[2])
{
CpuID = ARCH_ARMv5 | SplitARMFeatures(features);
}
else if (ARCH_ARMv6_NAME == features[2])
{
CpuID = ARCH_ARMv6 | SplitARMFeatures(features);
}
else if (ARCH_ARMv7_NAME == features[2])
{
CpuID = ARCH_ARMv7 | SplitARMFeatures(features);
}
else if (ARCH_X86_NAME == features[2])
{
CpuID = ARCH_X86 | SplitIntelFeatures(features);
}
else if (ARCH_X64_NAME == features[2])
{
CpuID = ARCH_X64 | SplitIntelFeatures(features);
}
#ifdef __SUPPORT_MIPS
else if (ARCH_MIPS_NAME == features[2])
{
CpuID = ARCH_MIPS;
}
#endif
else
{
LOGD("It is not OpenCV library package for this platform");
Version.clear();
CpuID = ARCH_UNKNOWN;
Platform = PLATFORM_UNKNOWN;
return;
}
}
}
else
{
LOGD("It is not OpenCV library package for this platform");
Version.clear();
CpuID = ARCH_UNKNOWN;
Platform = PLATFORM_UNKNOWN;
return;
}


@ -126,7 +126,7 @@ TEST(OpenCVEngineTest, GetPathForExecHWNewVersion)
EXPECT_EQ(0, result.size());
}
-#else
+#else // armeabi
TEST(OpenCVEngineTest, GetPathForExecHWExistVersion)
{
sp<IOpenCVEngine> Engine = InitConnect();


@ -10,8 +10,8 @@ DEVICE_ARCH = "armeabi"
if (__name__ == "__main__"):
if (len(sys.argv) >= 3):
DEVICE_ARCH = sys.argv[1]
DEVICE_NAME = sys.argv[2]
if (DEVICE_NAME != ""):
DEVICE_STR = "-s \"" + DEVICE_NAME + "\""


@ -18,14 +18,14 @@ def RunTestApp(AppName):
if (__name__ == "__main__"):
if (3 == len(sys.argv)):
DEVICE_ARCH = sys.argv[1]
DEVICE_NAME = sys.argv[2]
if (DEVICE_NAME != ""):
DEVICE_STR = "-s \"" + DEVICE_NAME + "\""
if (not os.path.exists(LOCAL_LOG_PATH)):
os.makedirs(LOCAL_LOG_PATH)
print("Waiting for device \"%s\" with arch \"%s\" ..." % (DEVICE_NAME, DEVICE_ARCH))
os.system("adb %s wait-for-device" % DEVICE_STR)

doc/opencv2manager.pdf (new binary file; binary contents not shown)

(other changed binary files omitted)


@ -366,6 +366,9 @@ bool validateData(const ChessBoardGenerator& cbg, const Size& imgSz,
bool CV_ChessboardDetectorTest::checkByGenerator()
{
bool res = true;
// for some reason, this test sometimes fails on Ubuntu
#if (defined __APPLE__ && defined __x86_64__) || defined _MSC_VER
//theRNG() = 0x58e6e895b9913160;
//cv::DefaultRngAuto dra;
//theRNG() = *ts->get_rng();
@ -464,6 +467,7 @@ bool CV_ChessboardDetectorTest::checkByGenerator()
cv::drawChessboardCorners(cb, cbg.cornersSize(), Mat(corners_found), found);
}
#endif
return res;
}


@ -806,6 +806,7 @@ struct Mutex::Impl
int refcount;
};
#ifndef __GNUC__
int _interlockedExchangeAdd(int* addr, int delta)
{
#if defined _MSC_VER && _MSC_VER >= 1500
@ -814,6 +815,7 @@ int _interlockedExchangeAdd(int* addr, int delta)
return (int)InterlockedExchangeAdd((long volatile*)addr, delta);
#endif
}
#endif // __GNUC__
#elif defined __APPLE__


@ -69,7 +69,7 @@ protected:
bool SomeMatFunctions();
bool TestMat();
-template<typename _Tp> void TestType(Size sz, _Tp value=_Tp(1.f));
+template<typename _Tp> void TestType(Size sz, _Tp value);
bool TestTemplateMat();
bool TestMatND();
bool TestSparseMat();
@ -116,9 +116,12 @@ template<typename _Tp> void CV_OperationsTest::TestType(Size sz, _Tp value)
m.elemSize() == sizeof(_Tp) && m.step == m.elemSize()*m.cols);
for( int y = 0; y < sz.height; y++ )
for( int x = 0; x < sz.width; x++ )
-m(y, x) = value;
+{
+m(y,x) = value;
+}
-CV_Assert( sum(m.reshape(1,1))[0] == (double)sz.width*sz.height );
+double s = sum(Mat(m).reshape(1))[0];
+CV_Assert( s == (double)sz.width*sz.height );
}
bool CV_OperationsTest::TestMat()
@ -795,15 +798,16 @@ bool CV_OperationsTest::TestTemplateMat()
}
CV_Assert( badarg_catched );
-#include <iostream>
-#include <opencv2/core/core.hpp>
Size size(2, 5);
-TestType<float>(size);
-TestType<cv::Vec3f>(size);
-TestType<cv::Matx31f>(size);
-TestType<cv::Matx41f>(size);
-TestType<cv::Matx32f>(size);
+TestType<float>(size, 1.f);
+cv::Vec3f val1 = 1.f;
+TestType<cv::Vec3f>(size, val1);
+cv::Matx31f val2 = 1.f;
+TestType<cv::Matx31f>(size, val2);
+cv::Matx41f val3 = 1.f;
+TestType<cv::Matx41f>(size, val3);
+cv::Matx32f val4 = 1.f;
+TestType<cv::Matx32f>(size, val4);
}
catch (const test_excep& e)
{


@ -44,6 +44,10 @@ The references are:
#include "precomp.hpp"
#include "fast_score.hpp"
#if defined _MSC_VER
# pragma warning( disable : 4127)
#endif
namespace cv
{


@ -31,8 +31,8 @@ PERF_TEST_P( TestWarpAffine, WarpAffine,
Size sz;
int borderMode, interType;
sz = get<0>(GetParam());
-borderMode = get<1>(GetParam());
-interType = get<2>(GetParam());
+interType = get<1>(GetParam());
+borderMode = get<2>(GetParam());
Mat src, img = imread(getDataPath("cv/shared/fruits.png"));
cvtColor(img, src, COLOR_BGR2RGBA, 4);
@ -58,8 +58,8 @@ PERF_TEST_P( TestWarpPerspective, WarpPerspective,
Size sz;
int borderMode, interType;
sz = get<0>(GetParam());
-borderMode = get<1>(GetParam());
-interType = get<2>(GetParam());
+interType = get<1>(GetParam());
+borderMode = get<2>(GetParam());
Mat src, img = imread(getDataPath("cv/shared/fruits.png"));
@ -98,9 +98,9 @@ PERF_TEST_P( TestWarpPerspectiveNear_t, WarpPerspectiveNear,
Size size;
int borderMode, interType, type;
size = get<0>(GetParam());
-borderMode = get<1>(GetParam());
-interType = get<2>(GetParam());
+interType = get<1>(GetParam());
+borderMode = get<2>(GetParam());
type = get<3>(GetParam());
Mat src, img = imread(getDataPath("cv/shared/5MP.png"));
@ -120,10 +120,14 @@ PERF_TEST_P( TestWarpPerspectiveNear_t, WarpPerspectiveNear,
resize(src, src, size);
int shift = src.cols*0.04;
-Mat srcVertices = (Mat_<Vec2f>(1, 4) << Vec2f(0, 0), Vec2f(size.width-1, 0),
-Vec2f(size.width-1, size.height-1), Vec2f(0, size.height-1));
-Mat dstVertices = (Mat_<Vec2f>(1, 4) << Vec2f(0, shift), Vec2f(size.width-shift/2, 0),
-Vec2f(size.width-shift, size.height-shift), Vec2f(shift/2, size.height-1));
+Mat srcVertices = (Mat_<Vec2f>(1, 4) << Vec2f(0, 0),
+Vec2f(static_cast<float>(size.width-1), 0),
+Vec2f(static_cast<float>(size.width-1), static_cast<float>(size.height-1)),
+Vec2f(0, static_cast<float>(size.height-1)));
+Mat dstVertices = (Mat_<Vec2f>(1, 4) << Vec2f(0, static_cast<float>(shift)),
+Vec2f(static_cast<float>(size.width-shift/2), 0),
+Vec2f(static_cast<float>(size.width-shift), static_cast<float>(size.height-shift)),
+Vec2f(static_cast<float>(shift/2), static_cast<float>(size.height-1)));
Mat warpMat = getPerspectiveTransform(srcVertices, dstVertices);
Mat dst(size, type);


@ -237,7 +237,7 @@ if(ANDROID)
set(lib_target_files ${ANDROID_LIB_PROJECT_FILES})
ocv_list_add_prefix(lib_target_files "${OpenCV_BINARY_DIR}/")
-android_get_compatible_target(lib_target_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET})
+android_get_compatible_target(lib_target_sdk_target ${ANDROID_NATIVE_API_LEVEL} ${ANDROID_SDK_TARGET} 11)
configure_file("${CMAKE_CURRENT_SOURCE_DIR}/android_lib/${ANDROID_MANIFEST_FILE}" "${CMAKE_CURRENT_BINARY_DIR}/${ANDROID_MANIFEST_FILE}")


@ -118,7 +118,7 @@ class AsyncServiceHelper
}
else
{
-Log.d(TAG, "Wating current installation process");
+Log.d(TAG, "Waiting current installation process");
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
@ -268,17 +268,21 @@ class AsyncServiceHelper
{
Log.d(TAG, "OpenCV package was not installed!");
mStatus = LoaderCallbackInterface.MARKET_ERROR;
Log.d(TAG, "Init finished with status " + mStatus);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(mStatus);
}
} catch (RemoteException e) {
e.printStackTrace();
mStatus = LoaderCallbackInterface.INIT_FAILED;
Log.d(TAG, "Init finished with status " + mStatus);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(mStatus);
}
Log.d(TAG, "Init finished with status " + mStatus);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(mStatus);
}
};


@ -28,7 +28,7 @@ public abstract class BaseLoaderCallback implements LoaderCallbackInterface {
/** OpenCV loader can not start Google Play Market. **/
case LoaderCallbackInterface.MARKET_ERROR:
{
-Log.d(TAG, "Google Play service is not installed! You can get it here");
+Log.e(TAG, "Package installation failed!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Package installation failed!");


@ -0,0 +1,335 @@
package org.opencv.android;
import java.util.List;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
* This is a basic class, implementing the interaction with Camera and OpenCV library.
Its main responsibility is to control when the camera can be enabled, to process the frame,
to call an external listener to make any adjustments to the frame, and then to draw the
resulting frame to the screen.
Clients shall implement CvCameraViewListener.
* TODO: add method to control the format in which the frames will be delivered to CvCameraViewListener
*/
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final int MAX_UNSPECIFIED = -1;
protected int mFrameWidth;
protected int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected int mPreviewFormat = Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA;
private Bitmap mCacheBitmap;
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
The returned value is the modified frame that needs to be displayed on the screen.
TODO: pass the parameters specifying the format of the frame (BPP, YUV or RGB, etc.)
*/
public Mat onCameraFrame(Mat inputFrame);
}
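/* Hedged usage sketch (not part of this class): a client typically implements
 * CvCameraViewListener roughly like this, where mGray is a Mat field allocated
 * in onCameraViewStarted() and released in onCameraViewStopped():
 *
 *     public Mat onCameraFrame(Mat inputFrame) {
 *         Imgproc.cvtColor(inputFrame, mGray, Imgproc.COLOR_RGBA2GRAY);
 *         return mGray; // whatever is returned here is drawn to the screen
 *     }
 */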
private static final int STOPPED = 0;
private static final int STARTED = 1;
private static final String TAG = "CameraBridge";
private CvCameraViewListener mListener;
private int mState = STOPPED;
private boolean mEnabled;
private boolean mSurfaceExist;
private Object mSyncObject = new Object();
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients so that they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after this method is called and the surface is available.
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients so that they can disable the camera connection and stop
* the delivery of frames even though the surface view itself is not destroyed and still stays on the screen.
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
mListener = listener;
}
/**
* This method sets the maximum size that the camera frame is allowed to be. When a preview size
* is selected, the biggest supported size that is less than or equal to the limit is chosen.
* As an example, if setMaxFrameSize(200,200) is called and the supported sizes are 176x152 and 320x240,
* the 176x152 preview size will be selected.
* This method is useful when the preview frame size needs to be restricted for some reason (for example, for video recording).
* @param maxWidth - the maximum width allowed for camera frame.
* @param maxHeight - the maximum height allowed for camera frame
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
}
/**
* Called when mSyncObject lock is held
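* The target state is STARTED only when the client has enabled the view and the
* surface exists; any other combination maps to STOPPED.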
*/
private void checkCurrentState() {
int targetState;
if (mEnabled && mSurfaceExist) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have a valid
* frame and want it to be delivered to the external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(Mat frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame;
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas = getHolder().lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(mCacheBitmap, (canvas.getWidth() - mCacheBitmap.getWidth()) / 2, (canvas.getHeight() - mCacheBitmap.getHeight()) / 2, null);
getHolder().unlockCanvasAndPost(canvas);
}
}
}
/**
* This method shall perform the concrete operations needed to initialize the camera.
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the Camera frames that will be delivered to external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and releases the particular camera object connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x this function must be called before the SurfaceTexture constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select the camera preview size.
* It goes over the list of supported preview sizes and selects the largest one that
* fits both the values set via setMaxFrameSize() and the surface frame allocated for this view.
* @param supportedSizes
* @param surfaceWidth
* @param surfaceHeight
* @return
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
return new Size(calcWidth, calcHeight);
}
}

View File

@ -0,0 +1,242 @@
package org.opencv.android;
import java.io.IOException;
import java.util.List;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
/**
* This class is an implementation of the Bridge View between OpenCV and the Java Camera.
* It relies on the functionality available in the base class and only implements the
* required functions:
* connectCamera - opens the Java camera and sets the PreviewCallback to be delivered.
* disconnectCamera - closes the camera and stops the preview.
* When a frame is delivered via the camera callback, it is processed with OpenCV (converted
* to RGBA32) and then passed to the external callback for modification if required.
*/
public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private Mat mBaseMat;
private byte mBuffer[];
private Thread mThread;
private boolean mStopThread;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
private Camera mCamera;
public JavaCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
Log.d(TAG, "Java camera view ctor");
}
@TargetApi(11)
protected boolean initializeCamera(int width, int height) {
Log.d(TAG, "Initialize java camera");
synchronized (this) {
mCamera = null;
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.d(TAG, "getSupportedPreviewSizes()");
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
params.setPreviewFormat(ImageFormat.NV21);
params.setPreviewSize((int)frameSize.width, (int)frameSize.height);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
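/* NV21 stores a full-resolution Y plane followed by a half-height interleaved
VU plane, hence height * 3/2 rows of single-channel data for the frame Mat
and 12 bits per pixel in the buffer size computation above */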
mBaseMat = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
AllocateCache();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
SurfaceTexture tex = new SurfaceTexture(MAGIC_TEXTURE_ID);
getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
mCamera.setPreviewTexture(tex);
} else
mCamera.setPreviewDisplay(null);
} catch (IOException e) {
e.printStackTrace();
}
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
}
return true;
}
protected void releaseCamera() {
synchronized (this) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate the camera
* 2. We need to start the thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(getWidth(), getHeight()))
return false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop the camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Wating for thread");
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
}
public void onPreviewFrame(byte[] frame, Camera arg1) {
Log.i(TAG, "Preview Frame received. Need to create MAT and deliver it to clients");
Log.i(TAG, "Frame size is " + frame.length);
synchronized (this)
{
mBaseMat.put(0, 0, frame);
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class CameraWorker implements Runnable {
public void run() {
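/* The worker blocks on wait(); onPreviewFrame() copies each new frame into
mBaseMat and calls notify(), after which the frame is converted and drawn here */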
do {
synchronized (JavaCameraView.this) {
try {
JavaCameraView.this.wait();
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
if (!mStopThread) {
Mat frameMat = new Mat();
switch (mPreviewFormat) {
case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
Imgproc.cvtColor(mBaseMat, frameMat, Imgproc.COLOR_YUV2RGBA_NV21, 4);
break;
case Highgui.CV_CAP_ANDROID_GREY_FRAME:
frameMat = mBaseMat.submat(0, mFrameHeight, 0, mFrameWidth);
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
deliverAndDrawFrame(frameMat);
frameMat.release();
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}

View File

@ -0,0 +1,145 @@
package org.opencv.android;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
/**
* This class is an implementation of a bridge between SurfaceView and the native OpenCV camera.
* Because most of the work is done by the base class, this child class is only responsible
* for creating the camera, destroying the camera, and delivering frames while the camera is enabled.
*/
public class NativeCameraView extends CameraBridgeViewBase {
public static final String TAG = "NativeCameraView";
private boolean mStopThread;
private Thread mThread;
private VideoCapture mCamera;
public NativeCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate the camera
* 2. We need to start the thread which will be getting frames
*/
/* First step - initialize camera connection */
if (!initializeCamera(getWidth(), getHeight()))
return false;
/* now we can start update thread */
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
@Override
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop the camera and release it
*/
try {
mStopThread = true;
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
mStopThread = false;
}
/* Now release camera */
releaseCamera();
}
public static class OpenCvSizeAccessor implements ListItemAccessor {
public int getWidth(Object obj) {
Size size = (Size)obj;
return (int)size.width;
}
public int getHeight(Object obj) {
Size size = (Size)obj;
return (int)size.height;
}
}
private boolean initializeCamera(int width, int height) {
synchronized (this) {
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
/* The constructor never returns null in Java; check whether the capture actually opened */
if (!mCamera.isOpened())
return false;
//TODO: improve error handling
java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new OpenCvSizeAccessor(), width, height);
mFrameWidth = (int)frameSize.width;
mFrameHeight = (int)frameSize.height;
AllocateCache();
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
}
Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");
return true;
}
private void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.release();
}
}
}
private class CameraWorker implements Runnable {
private Mat mRgba = new Mat();
private Mat mGray = new Mat();
public void run() {
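/* grab() advances the native capture to the next frame; retrieve() then
decodes it into the requested format (RGBA or grayscale) */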
do {
if (!mCamera.grab()) {
Log.e(TAG, "Camera frame grab failed");
break;
}
switch (mPreviewFormat) {
case Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA:
{
mCamera.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
deliverAndDrawFrame(mRgba);
} break;
case Highgui.CV_CAP_ANDROID_GREY_FRAME:
mCamera.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
deliverAndDrawFrame(mGray);
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
}
} while (!mStopThread);
}
}
}

View File

@ -12,6 +12,11 @@ public class OpenCVLoader
*/
public static final String OPENCV_VERSION_2_4_2 = "2.4.2";
/**
* OpenCV Library version 2.4.3.
*/
public static final String OPENCV_VERSION_2_4_3 = "2.4.3";
/**
* Loads and initializes the OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
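*
* A minimal usage sketch (illustrative only; it assumes the native libraries
* are packaged inside the application APK):
*
*     if (!OpenCVLoader.initDebug())
*         Log.e(TAG, "OpenCV static initialization failed");
*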
* @return Returns true if initialization of OpenCV was successful.

View File

@ -1,176 +0,0 @@
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// Intel License Agreement
//
// Copyright (C) 2000, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of Intel Corporation may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "precomp.hpp"
/*======================= KALMAN FILTER AS TRACKER =========================*/
/* State vector is (x,y,w,h,dx,dy,dw,dh). */
/* Measurement is (x,y,w,h) */
/* Dynamic matrix A: */
const float A8[] = { 1, 0, 0, 0, 1, 0, 0, 0,
0, 1, 0, 0, 0, 1, 0, 0,
0, 0, 1, 0, 0, 0, 1, 0,
0, 0, 0, 1, 0, 0, 0, 1,
0, 0, 0, 0, 1, 0, 0, 0,
0, 0, 0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 0, 0, 1};
/* Measurement matrix H: */
const float H8[] = { 1, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0, 0, 0,
0, 0, 0, 1, 0, 0, 0, 0};
/* Matrices for zero size velocity: */
/* Dynamic matrix A: */
const float A6[] = { 1, 0, 0, 0, 1, 0,
0, 1, 0, 0, 0, 1,
0, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 0,
0, 0, 0, 0, 1, 0,
0, 0, 0, 0, 0, 1};
/* Measurement matrix H: */
const float H6[] = { 1, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0,
0, 0, 1, 0, 0, 0,
0, 0, 0, 1, 0, 0};
#define STATE_NUM 6
#define A A6
#define H H6
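/* Together A and H define a constant-velocity model:
   state_{k+1} = A * state_k,  measurement z_k = H * state_k = (x,y,w,h);
   with STATE_NUM == 6 only the position (x,y) carries a velocity term,
   while the size (w,h) is modeled as constant. */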
class CvBlobTrackerOneKalman:public CvBlobTrackerOne
{
private:
CvBlob m_Blob;
CvKalman* m_pKalman;
int m_Frame;
public:
CvBlobTrackerOneKalman()
{
m_Frame = 0;
m_pKalman = cvCreateKalman(STATE_NUM,4);
memcpy( m_pKalman->transition_matrix->data.fl, A, sizeof(A));
memcpy( m_pKalman->measurement_matrix->data.fl, H, sizeof(H));
cvSetIdentity( m_pKalman->process_noise_cov, cvRealScalar(1e-5) );
cvSetIdentity( m_pKalman->measurement_noise_cov, cvRealScalar(1e-1) );
// CV_MAT_ELEM(*m_pKalman->measurement_noise_cov, float, 2,2) *= (float)pow(20,2);
// CV_MAT_ELEM(*m_pKalman->measurement_noise_cov, float, 3,3) *= (float)pow(20,2);
cvSetIdentity( m_pKalman->error_cov_post, cvRealScalar(1));
cvZero(m_pKalman->state_post);
cvZero(m_pKalman->state_pre);
SetModuleName("Kalman");
}
~CvBlobTrackerOneKalman()
{
cvReleaseKalman(&m_pKalman);
}
virtual void Init(CvBlob* pBlob, IplImage* /*pImg*/, IplImage* /*pImgFG*/ = NULL)
{
m_Blob = pBlob[0];
m_pKalman->state_post->data.fl[0] = CV_BLOB_X(pBlob);
m_pKalman->state_post->data.fl[1] = CV_BLOB_Y(pBlob);
m_pKalman->state_post->data.fl[2] = CV_BLOB_WX(pBlob);
m_pKalman->state_post->data.fl[3] = CV_BLOB_WY(pBlob);
}
virtual CvBlob* Process(CvBlob* pBlob, IplImage* /*pImg*/, IplImage* /*pImgFG*/ = NULL)
{
CvBlob* pBlobRes = &m_Blob;
float Z[4];
CvMat Zmat = cvMat(4,1,CV_32F,Z);
m_Blob = pBlob[0];
if(m_Frame < 2)
{ /* First call: */
m_pKalman->state_post->data.fl[0+4] = CV_BLOB_X(pBlob)-m_pKalman->state_post->data.fl[0];
m_pKalman->state_post->data.fl[1+4] = CV_BLOB_Y(pBlob)-m_pKalman->state_post->data.fl[1];
if(m_pKalman->DP>6)
{
m_pKalman->state_post->data.fl[2+4] = CV_BLOB_WX(pBlob)-m_pKalman->state_post->data.fl[2];
m_pKalman->state_post->data.fl[3+4] = CV_BLOB_WY(pBlob)-m_pKalman->state_post->data.fl[3];
}
m_pKalman->state_post->data.fl[0] = CV_BLOB_X(pBlob);
m_pKalman->state_post->data.fl[1] = CV_BLOB_Y(pBlob);
m_pKalman->state_post->data.fl[2] = CV_BLOB_WX(pBlob);
m_pKalman->state_post->data.fl[3] = CV_BLOB_WY(pBlob);
memcpy(m_pKalman->state_pre->data.fl,m_pKalman->state_post->data.fl,sizeof(float)*STATE_NUM);
}
else
{ /* Another call: */
Z[0] = CV_BLOB_X(pBlob);
Z[1] = CV_BLOB_Y(pBlob);
Z[2] = CV_BLOB_WX(pBlob);
Z[3] = CV_BLOB_WY(pBlob);
cvKalmanCorrect(m_pKalman,&Zmat);
cvKalmanPredict(m_pKalman,0);
cvMatMulAdd(m_pKalman->measurement_matrix, m_pKalman->state_pre, NULL, &Zmat);
CV_BLOB_X(pBlobRes) = Z[0];
CV_BLOB_Y(pBlobRes) = Z[1];
CV_BLOB_WX(pBlobRes) = Z[2];
CV_BLOB_WY(pBlobRes) = Z[3];
}
m_Frame++;
return pBlobRes;
}
virtual void Release()
{
delete this;
}
}; /* class CvBlobTrackerOneKalman */
#if 0
static CvBlobTrackerOne* cvCreateModuleBlobTrackerOneKalman()
{
return (CvBlobTrackerOne*) new CvBlobTrackerOneKalman;
}
CvBlobTracker* cvCreateBlobTrackerKalman()
{
return cvCreateBlobTrackerList(cvCreateModuleBlobTrackerOneKalman);
}
#endif

View File

@ -54,6 +54,9 @@
#if CV_AVX
# define CV_HAAR_USE_AVX 1
# if defined _MSC_VER
# pragma warning( disable : 4752 )
# endif
#else
# if CV_SSE2 || CV_SSE3
# define CV_HAAR_USE_SSE 1
@ -412,6 +415,9 @@ icvCreateHidHaarClassifierCascade( CvHaarClassifierCascade* cascade )
#define calc_sum(rect,offset) \
((rect).p0[offset] - (rect).p1[offset] - (rect).p2[offset] + (rect).p3[offset])
#define calc_sumf(rect,offset) \
static_cast<float>((rect).p0[offset] - (rect).p1[offset] - (rect).p2[offset] + (rect).p3[offset])
CV_IMPL void
cvSetImagesForHaarClassifierCascade( CvHaarClassifierCascade* _cascade,
@ -652,7 +658,7 @@ double icvEvalHidHaarClassifierAVX( CvHidHaarClassifier* classifier,
nodes[6] = (classifier+6)->node + idxV[6];
nodes[7] = (classifier+7)->node + idxV[7];
__m256 t = _mm256_set1_ps(variance_norm_factor);
__m256 t = _mm256_set1_ps(static_cast<float>(variance_norm_factor));
t = _mm256_mul_ps(t, _mm256_set_ps(nodes[7]->threshold,
nodes[6]->threshold,
@ -663,14 +669,14 @@ double icvEvalHidHaarClassifierAVX( CvHidHaarClassifier* classifier,
nodes[1]->threshold,
nodes[0]->threshold));
__m256 offset = _mm256_set_ps(calc_sum(nodes[7]->feature.rect[0], p_offset),
calc_sum(nodes[6]->feature.rect[0], p_offset),
calc_sum(nodes[5]->feature.rect[0], p_offset),
calc_sum(nodes[4]->feature.rect[0], p_offset),
calc_sum(nodes[3]->feature.rect[0], p_offset),
calc_sum(nodes[2]->feature.rect[0], p_offset),
calc_sum(nodes[1]->feature.rect[0], p_offset),
calc_sum(nodes[0]->feature.rect[0], p_offset));
__m256 offset = _mm256_set_ps(calc_sumf(nodes[7]->feature.rect[0], p_offset),
calc_sumf(nodes[6]->feature.rect[0], p_offset),
calc_sumf(nodes[5]->feature.rect[0], p_offset),
calc_sumf(nodes[4]->feature.rect[0], p_offset),
calc_sumf(nodes[3]->feature.rect[0], p_offset),
calc_sumf(nodes[2]->feature.rect[0], p_offset),
calc_sumf(nodes[1]->feature.rect[0], p_offset),
calc_sumf(nodes[0]->feature.rect[0], p_offset));
__m256 weight = _mm256_set_ps(nodes[7]->feature.rect[0].weight,
nodes[6]->feature.rect[0].weight,
@ -683,14 +689,14 @@ double icvEvalHidHaarClassifierAVX( CvHidHaarClassifier* classifier,
__m256 sum = _mm256_mul_ps(offset, weight);
offset = _mm256_set_ps(calc_sum(nodes[7]->feature.rect[1], p_offset),
calc_sum(nodes[6]->feature.rect[1], p_offset),
calc_sum(nodes[5]->feature.rect[1], p_offset),
calc_sum(nodes[4]->feature.rect[1], p_offset),
calc_sum(nodes[3]->feature.rect[1], p_offset),
calc_sum(nodes[2]->feature.rect[1], p_offset),
calc_sum(nodes[1]->feature.rect[1], p_offset),
calc_sum(nodes[0]->feature.rect[1], p_offset));
offset = _mm256_set_ps(calc_sumf(nodes[7]->feature.rect[1], p_offset),
calc_sumf(nodes[6]->feature.rect[1], p_offset),
calc_sumf(nodes[5]->feature.rect[1], p_offset),
calc_sumf(nodes[4]->feature.rect[1], p_offset),
calc_sumf(nodes[3]->feature.rect[1], p_offset),
calc_sumf(nodes[2]->feature.rect[1], p_offset),
calc_sumf(nodes[1]->feature.rect[1], p_offset),
calc_sumf(nodes[0]->feature.rect[1], p_offset));
weight = _mm256_set_ps(nodes[7]->feature.rect[1].weight,
nodes[6]->feature.rect[1].weight,
@ -704,21 +710,21 @@ double icvEvalHidHaarClassifierAVX( CvHidHaarClassifier* classifier,
sum = _mm256_add_ps(sum, _mm256_mul_ps(offset, weight));
if( nodes[0]->feature.rect[2].p0 )
tmp[0] = calc_sum(nodes[0]->feature.rect[2], p_offset) * nodes[0]->feature.rect[2].weight;
tmp[0] = calc_sumf(nodes[0]->feature.rect[2], p_offset) * nodes[0]->feature.rect[2].weight;
if( nodes[1]->feature.rect[2].p0 )
tmp[1] = calc_sum(nodes[1]->feature.rect[2], p_offset) * nodes[1]->feature.rect[2].weight;
tmp[1] = calc_sumf(nodes[1]->feature.rect[2], p_offset) * nodes[1]->feature.rect[2].weight;
if( nodes[2]->feature.rect[2].p0 )
tmp[2] = calc_sum(nodes[2]->feature.rect[2], p_offset) * nodes[2]->feature.rect[2].weight;
tmp[2] = calc_sumf(nodes[2]->feature.rect[2], p_offset) * nodes[2]->feature.rect[2].weight;
if( nodes[3]->feature.rect[2].p0 )
tmp[3] = calc_sum(nodes[3]->feature.rect[2], p_offset) * nodes[3]->feature.rect[2].weight;
tmp[3] = calc_sumf(nodes[3]->feature.rect[2], p_offset) * nodes[3]->feature.rect[2].weight;
if( nodes[4]->feature.rect[2].p0 )
tmp[4] = calc_sum(nodes[4]->feature.rect[2], p_offset) * nodes[4]->feature.rect[2].weight;
tmp[4] = calc_sumf(nodes[4]->feature.rect[2], p_offset) * nodes[4]->feature.rect[2].weight;
if( nodes[5]->feature.rect[2].p0 )
tmp[5] = calc_sum(nodes[5]->feature.rect[2], p_offset) * nodes[5]->feature.rect[2].weight;
tmp[5] = calc_sumf(nodes[5]->feature.rect[2], p_offset) * nodes[5]->feature.rect[2].weight;
if( nodes[6]->feature.rect[2].p0 )
tmp[6] = calc_sum(nodes[6]->feature.rect[2], p_offset) * nodes[6]->feature.rect[2].weight;
tmp[6] = calc_sumf(nodes[6]->feature.rect[2], p_offset) * nodes[6]->feature.rect[2].weight;
if( nodes[7]->feature.rect[2].p0 )
tmp[7] = calc_sum(nodes[7]->feature.rect[2], p_offset) * nodes[7]->feature.rect[2].weight;
tmp[7] = calc_sumf(nodes[7]->feature.rect[2], p_offset) * nodes[7]->feature.rect[2].weight;
sum = _mm256_add_ps(sum,_mm256_load_ps(tmp));
@ -918,7 +924,7 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
classifiers[7] = cascade->stage_classifier[i].classifier + j + 7;
nodes[7] = classifiers[7]->node;
__m256 t = _mm256_set1_ps(variance_norm_factor);
__m256 t = _mm256_set1_ps(static_cast<float>(variance_norm_factor));
t = _mm256_mul_ps(t, _mm256_set_ps(nodes[7]->threshold,
nodes[6]->threshold,
nodes[5]->threshold,
@ -928,14 +934,14 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
nodes[1]->threshold,
nodes[0]->threshold));
__m256 offset = _mm256_set_ps(calc_sum(nodes[7]->feature.rect[0], p_offset),
calc_sum(nodes[6]->feature.rect[0], p_offset),
calc_sum(nodes[5]->feature.rect[0], p_offset),
calc_sum(nodes[4]->feature.rect[0], p_offset),
calc_sum(nodes[3]->feature.rect[0], p_offset),
calc_sum(nodes[2]->feature.rect[0], p_offset),
calc_sum(nodes[1]->feature.rect[0], p_offset),
calc_sum(nodes[0]->feature.rect[0], p_offset));
__m256 offset = _mm256_set_ps(calc_sumf(nodes[7]->feature.rect[0], p_offset),
calc_sumf(nodes[6]->feature.rect[0], p_offset),
calc_sumf(nodes[5]->feature.rect[0], p_offset),
calc_sumf(nodes[4]->feature.rect[0], p_offset),
calc_sumf(nodes[3]->feature.rect[0], p_offset),
calc_sumf(nodes[2]->feature.rect[0], p_offset),
calc_sumf(nodes[1]->feature.rect[0], p_offset),
calc_sumf(nodes[0]->feature.rect[0], p_offset));
__m256 weight = _mm256_set_ps(nodes[7]->feature.rect[0].weight,
nodes[6]->feature.rect[0].weight,
@ -948,14 +954,14 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
__m256 sum = _mm256_mul_ps(offset, weight);
offset = _mm256_set_ps(calc_sum(nodes[7]->feature.rect[1], p_offset),
calc_sum(nodes[6]->feature.rect[1], p_offset),
calc_sum(nodes[5]->feature.rect[1], p_offset),
calc_sum(nodes[4]->feature.rect[1], p_offset),
calc_sum(nodes[3]->feature.rect[1], p_offset),
calc_sum(nodes[2]->feature.rect[1], p_offset),
calc_sum(nodes[1]->feature.rect[1], p_offset),
calc_sum(nodes[0]->feature.rect[1], p_offset));
offset = _mm256_set_ps(calc_sumf(nodes[7]->feature.rect[1], p_offset),
calc_sumf(nodes[6]->feature.rect[1], p_offset),
calc_sumf(nodes[5]->feature.rect[1], p_offset),
calc_sumf(nodes[4]->feature.rect[1], p_offset),
calc_sumf(nodes[3]->feature.rect[1], p_offset),
calc_sumf(nodes[2]->feature.rect[1], p_offset),
calc_sumf(nodes[1]->feature.rect[1], p_offset),
calc_sumf(nodes[0]->feature.rect[1], p_offset));
weight = _mm256_set_ps(nodes[7]->feature.rect[1].weight,
nodes[6]->feature.rect[1].weight,
@ -1023,7 +1029,7 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
classifiers[7] = cascade->stage_classifier[i].classifier + j + 7;
nodes[7] = classifiers[7]->node;
__m256 t = _mm256_set1_ps(variance_norm_factor);
__m256 t = _mm256_set1_ps(static_cast<float>(variance_norm_factor));
t = _mm256_mul_ps(t, _mm256_set_ps(nodes[7]->threshold,
nodes[6]->threshold,
@ -1034,14 +1040,14 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
nodes[1]->threshold,
nodes[0]->threshold));
__m256 offset = _mm256_set_ps(calc_sum(nodes[7]->feature.rect[0], p_offset),
calc_sum(nodes[6]->feature.rect[0], p_offset),
calc_sum(nodes[5]->feature.rect[0], p_offset),
calc_sum(nodes[4]->feature.rect[0], p_offset),
calc_sum(nodes[3]->feature.rect[0], p_offset),
calc_sum(nodes[2]->feature.rect[0], p_offset),
calc_sum(nodes[1]->feature.rect[0], p_offset),
calc_sum(nodes[0]->feature.rect[0], p_offset));
__m256 offset = _mm256_set_ps(calc_sumf(nodes[7]->feature.rect[0], p_offset),
calc_sumf(nodes[6]->feature.rect[0], p_offset),
calc_sumf(nodes[5]->feature.rect[0], p_offset),
calc_sumf(nodes[4]->feature.rect[0], p_offset),
calc_sumf(nodes[3]->feature.rect[0], p_offset),
calc_sumf(nodes[2]->feature.rect[0], p_offset),
calc_sumf(nodes[1]->feature.rect[0], p_offset),
calc_sumf(nodes[0]->feature.rect[0], p_offset));
__m256 weight = _mm256_set_ps(nodes[7]->feature.rect[0].weight,
nodes[6]->feature.rect[0].weight,
@ -1054,14 +1060,14 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
__m256 sum = _mm256_mul_ps(offset, weight);
offset = _mm256_set_ps(calc_sum(nodes[7]->feature.rect[1], p_offset),
calc_sum(nodes[6]->feature.rect[1], p_offset),
calc_sum(nodes[5]->feature.rect[1], p_offset),
calc_sum(nodes[4]->feature.rect[1], p_offset),
calc_sum(nodes[3]->feature.rect[1], p_offset),
calc_sum(nodes[2]->feature.rect[1], p_offset),
calc_sum(nodes[1]->feature.rect[1], p_offset),
calc_sum(nodes[0]->feature.rect[1], p_offset));
offset = _mm256_set_ps(calc_sumf(nodes[7]->feature.rect[1], p_offset),
calc_sumf(nodes[6]->feature.rect[1], p_offset),
calc_sumf(nodes[5]->feature.rect[1], p_offset),
calc_sumf(nodes[4]->feature.rect[1], p_offset),
calc_sumf(nodes[3]->feature.rect[1], p_offset),
calc_sumf(nodes[2]->feature.rect[1], p_offset),
calc_sumf(nodes[1]->feature.rect[1], p_offset),
calc_sumf(nodes[0]->feature.rect[1], p_offset));
weight = _mm256_set_ps(nodes[7]->feature.rect[1].weight,
nodes[6]->feature.rect[1].weight,
@ -1075,21 +1081,21 @@ cvRunHaarClassifierCascadeSum( const CvHaarClassifierCascade* _cascade,
sum = _mm256_add_ps(sum, _mm256_mul_ps(offset, weight));
if( nodes[0]->feature.rect[2].p0 )
tmp[0] = calc_sum(nodes[0]->feature.rect[2],p_offset) * nodes[0]->feature.rect[2].weight;
tmp[0] = calc_sumf(nodes[0]->feature.rect[2],p_offset) * nodes[0]->feature.rect[2].weight;
if( nodes[1]->feature.rect[2].p0 )
tmp[1] = calc_sum(nodes[1]->feature.rect[2],p_offset) * nodes[1]->feature.rect[2].weight;
tmp[1] = calc_sumf(nodes[1]->feature.rect[2],p_offset) * nodes[1]->feature.rect[2].weight;
if( nodes[2]->feature.rect[2].p0 )
tmp[2] = calc_sum(nodes[2]->feature.rect[2],p_offset) * nodes[2]->feature.rect[2].weight;
tmp[2] = calc_sumf(nodes[2]->feature.rect[2],p_offset) * nodes[2]->feature.rect[2].weight;
if( nodes[3]->feature.rect[2].p0 )
tmp[3] = calc_sum(nodes[3]->feature.rect[2],p_offset) * nodes[3]->feature.rect[2].weight;
tmp[3] = calc_sumf(nodes[3]->feature.rect[2],p_offset) * nodes[3]->feature.rect[2].weight;
if( nodes[4]->feature.rect[2].p0 )
tmp[4] = calc_sum(nodes[4]->feature.rect[2],p_offset) * nodes[4]->feature.rect[2].weight;
tmp[4] = calc_sumf(nodes[4]->feature.rect[2],p_offset) * nodes[4]->feature.rect[2].weight;
if( nodes[5]->feature.rect[2].p0 )
tmp[5] = calc_sum(nodes[5]->feature.rect[2],p_offset) * nodes[5]->feature.rect[2].weight;
tmp[5] = calc_sumf(nodes[5]->feature.rect[2],p_offset) * nodes[5]->feature.rect[2].weight;
if( nodes[6]->feature.rect[2].p0 )
tmp[6] = calc_sum(nodes[6]->feature.rect[2],p_offset) * nodes[6]->feature.rect[2].weight;
tmp[6] = calc_sumf(nodes[6]->feature.rect[2],p_offset) * nodes[6]->feature.rect[2].weight;
if( nodes[7]->feature.rect[2].p0 )
tmp[7] = calc_sum(nodes[7]->feature.rect[2],p_offset) * nodes[7]->feature.rect[2].weight;
tmp[7] = calc_sumf(nodes[7]->feature.rect[2],p_offset) * nodes[7]->feature.rect[2].weight;
sum = _mm256_add_ps(sum, _mm256_load_ps(tmp));

View File

@ -390,10 +390,7 @@ class FunctionTests(OpenCVTests):
def test_DrawChessboardCorners(self):
im = cv.CreateImage((512,512), cv.IPL_DEPTH_8U, 3)
cv.SetZero(im)
cv.DrawChessboardCorners(im, (5, 5), [ (100,100) for i in range(5 * 5) ], 1)
self.assert_(cv.Sum(im)[0] > 0)
self.assertRaises(TypeError, lambda: cv.DrawChessboardCorners(im, (4, 5), [ (100,100) for i in range(5 * 5) ], 1))
cv.DrawChessboardCorners(im, (5, 5), [ ((i/5)*100+50,(i%5)*100+50) for i in range(5 * 5) ], 1)
def test_ExtractSURF(self):
img = self.get_sample("samples/c/lena.jpg", 0)

View File

@ -628,7 +628,7 @@ bool DpSeamFinder::getSeamTips(int comp1, int comp2, Point &p1, Point &p2)
{
for (int j = i+1; j < nlabels; ++j)
{
double size1 = points[i].size(), size2 = points[j].size();
double size1 = static_cast<double>(points[i].size()), size2 = static_cast<double>(points[j].size());
double cx1 = cvRound(sum[i].x / size1), cy1 = cvRound(sum[i].y / size1);
double cx2 = cvRound(sum[j].x / size2), cy2 = cvRound(sum[j].y / size2);
@ -648,7 +648,7 @@ bool DpSeamFinder::getSeamTips(int comp1, int comp2, Point &p1, Point &p2)
for (int i = 0; i < 2; ++i)
{
double size = points[idx[i]].size();
double size = static_cast<double>(points[idx[i]].size());
double cx = cvRound(sum[idx[i]].x / size);
double cy = cvRound(sum[idx[i]].y / size);
@ -1036,7 +1036,7 @@ void DpSeamFinder::updateLabelsUsingSeam(
for (map<int, int>::iterator itr = connect2.begin(); itr != connect2.end(); ++itr)
{
double len = contours_[comp1].size();
double len = static_cast<double>(contours_[comp1].size());
isAdjComp[itr->first] = itr->second / len > 0.05 && connectOther.find(itr->first)->second / len < 0.1;
}

View File

@ -6352,7 +6352,9 @@ namespace internal {
// Valid only for fast death tests. Indicates the code is running in the
// child process of a fast style death test.
# if !GTEST_OS_WINDOWS
static bool g_in_fast_death_test_child = false;
# endif
// Returns a Boolean value indicating whether the caller is currently
// executing in the context of the death test child process. Tools such as

View File

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@ -1,6 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>OpenCV Sample - 15-puzzle</name>
<name>15-puzzle</name>
<comment></comment>
<projects>
</projects>

View File

@ -1,4 +1,3 @@
#Wed Jun 29 04:36:40 MSD 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
org.eclipse.jdt.core.compiler.compliance=1.5

View File

@ -1,29 +1,28 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv.samples.puzzle15"
android:versionCode="21"
android:versionName="2.1">
package="org.opencv.samples.puzzle15"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk android:minSdkVersion="8"/>
<application
android:icon="@drawable/icon"
android:label="@string/app_name" >
<activity
android:name=".Puzzle15Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation" >
<application android:label="@string/app_name" android:icon="@drawable/icon">
<activity android:name="puzzle15Activity"
android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="keyboardHidden|orientation">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
<supports-screens android:resizeable="true"
android:smallScreens="true"
android:normalScreens="true"
android:largeScreens="true"
android:anyDensity="true" />
<uses-sdk android:minSdkVersion="8" />
<uses-permission android:name="android.permission.CAMERA"/>
<uses-feature android:name="android.hardware.camera" android:required="false"/>

View File

@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/puzzle_activity_surface_view" />
</LinearLayout>

View File

@ -0,0 +1,6 @@
<menu xmlns:android="http://schemas.android.com/apk/res/android">
<item android:id="@+id/menu_start_new_game"
android:title="@string/menu_start_new_game"
android:orderInCategory="100" />
<item android:id="@+id/menu_toggle_tile_numbers" android:title="@string/menu_toggle_tile_numbers"></item>
</menu>

View File

@ -1,4 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">OCV 15 Puzzle</string>
</resources>
<string name="menu_toggle_tile_numbers">Show/hide tile numbers</string>
<string name="menu_start_new_game">Start new game</string>
</resources>

View File

@ -0,0 +1,130 @@
package org.opencv.samples.puzzle15;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import android.os.Bundle;
import android.app.Activity;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.Window;
import android.view.View;
public class Puzzle15Activity extends Activity implements CvCameraViewListener, View.OnTouchListener {
private static final String TAG = "Sample::Puzzle15::Activity";
private JavaCameraView mOpenCvCameraView;
private Puzzle15Processor mPuzzle15;
private int mGameWidth;
private int mGameHeight;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
/* Now enable camera view to start receiving frames */
mOpenCvCameraView.setOnTouchListener(Puzzle15Activity.this);
mOpenCvCameraView.enableView();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.activity_puzzle15);
mOpenCvCameraView = (JavaCameraView) findViewById(R.id.puzzle_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
mPuzzle15 = new Puzzle15Processor();
mPuzzle15.prepareNewGame();
}
@Override
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.activity_puzzle15, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "Menu Item selected " + item);
if (item.getItemId() == R.id.menu_start_new_game) {
/* We need to start new game */
mPuzzle15.prepareNewGame();
} else if (item.getItemId() == R.id.menu_toggle_tile_numbers) {
/* We need to enable or disable drawing of the tile numbers */
mPuzzle15.toggleTileNumbers();
}
return true;
}
public void onCameraViewStarted(int width, int height) {
mGameWidth = width;
mGameHeight = height;
mPuzzle15.prepareGameSize(width, height);
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(Mat inputFrame) {
return mPuzzle15.puzzleFrame(inputFrame);
}
public boolean onTouch(View view, MotionEvent event) {
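/* Translate view coordinates into frame coordinates by subtracting the offset
that centers the (possibly smaller) camera frame inside the view */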
int xpos, ypos;
xpos = (view.getWidth() - mGameWidth) / 2;
xpos = (int)event.getX() - xpos;
ypos = (view.getHeight() - mGameHeight) / 2;
ypos = (int)event.getY() - ypos;
if (xpos >=0 && xpos <= mGameWidth && ypos >=0 && ypos <= mGameHeight) {
/* click is inside the picture. Deliver this event to processor */
mPuzzle15.deliverTouchEvent(xpos, ypos);
}
return false;
}
}

View File

@ -0,0 +1,193 @@
package org.opencv.samples.puzzle15;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.core.Point;
import android.util.Log;
/**
* This class is a controller for the puzzle game.
* It converts the camera image into the shuffled image.
*/
public class Puzzle15Processor {
private static final int GRID_SIZE = 4;
private static final int GRID_AREA = GRID_SIZE * GRID_SIZE;
private static final int GRID_EMPTY_INDEX = GRID_AREA - 1;
private static final String TAG = "Puzzle15Processor";
private int[] mIndexes;
private int[] mTextWidths;
private int[] mTextHeights;
private Mat mRgba15;
private Mat[] mCells;
private Mat[] mCells15;
private boolean mShowTileNumbers = true;
public Puzzle15Processor() {
mTextWidths = new int[GRID_AREA];
mTextHeights = new int[GRID_AREA];
mIndexes = new int [GRID_AREA];
for (int i = 0; i < GRID_AREA; i++)
mIndexes[i] = i;
}
/* This method prepares the processor for a new game. */
public synchronized void prepareNewGame() {
do {
shuffle(mIndexes);
} while (!isPuzzleSolvable());
}
/* This method lets the processor know the size of the frames that
* will be delivered via puzzleFrame.
* If frames of a different size are delivered, the result is unpredictable.
*/
public synchronized void prepareGameSize(int width, int height) {
mRgba15 = new Mat(height, width, CvType.CV_8UC4);
mCells = new Mat[GRID_AREA];
mCells15 = new Mat[GRID_AREA];
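/* Each cell is a submat view into mRgba15, so drawing into a cell below
writes directly into the full output frame */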
for (int i = 0; i < GRID_SIZE; i++) {
for (int j = 0; j < GRID_SIZE; j++) {
int k = i * GRID_SIZE + j;
mCells15[k] = mRgba15.submat(i * height / GRID_SIZE, (i + 1) * height / GRID_SIZE, j * width / GRID_SIZE, (j + 1) * width / GRID_SIZE);
}
}
for (int i = 0; i < GRID_AREA; i++) {
Size s = Core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
mTextHeights[i] = (int) s.height;
mTextWidths[i] = (int) s.width;
}
}
/* This method is to be called from the outside. It processes the frame and shuffles
* the tiles as specified by the mIndexes array.
*/
public synchronized Mat puzzleFrame(Mat inputPicture) {
int rows = inputPicture.rows();
int cols = inputPicture.cols();
rows = rows - rows%4;
cols = cols - cols%4;
for (int i = 0; i < GRID_SIZE; i++) {
for (int j = 0; j < GRID_SIZE; j++) {
int k = i * GRID_SIZE + j;
mCells[k] = inputPicture.submat(i * inputPicture.rows() / GRID_SIZE, (i + 1) * inputPicture.rows() / GRID_SIZE, j * inputPicture.cols()/ GRID_SIZE, (j + 1) * inputPicture.cols() / GRID_SIZE);
}
}
// copy shuffled tiles
for (int i = 0; i < GRID_AREA; i++) {
int idx = mIndexes[i];
if (idx == GRID_EMPTY_INDEX)
mCells15[i].setTo(new Scalar(0x33, 0x33, 0x33, 0xFF));
else {
mCells[idx].copyTo(mCells15[i]);
if (mShowTileNumbers) {
Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / GRID_SIZE - mTextWidths[idx]) / 2,
(rows / GRID_SIZE + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);
}
}
}
drawGrid(cols, rows, mRgba15);
return mRgba15;
}
public void toggleTileNumbers() {
mShowTileNumbers = !mShowTileNumbers;
}
public void deliverTouchEvent(int x, int y) {
int rows = mRgba15.rows();
int cols = mRgba15.cols();
int row = (int) Math.floor(y * GRID_SIZE / rows);
int col = (int) Math.floor(x * GRID_SIZE / cols);
if (row < 0 || row >= GRID_SIZE || col < 0 || col >= GRID_SIZE) {
Log.e(TAG, "It is not expected to get touch event outside of picture");
return ;
}
int idx = row * GRID_SIZE + col;
int idxtoswap = -1;
// left
if (idxtoswap < 0 && col > 0)
if (mIndexes[idx - 1] == GRID_EMPTY_INDEX)
idxtoswap = idx - 1;
// right
if (idxtoswap < 0 && col < GRID_SIZE - 1)
if (mIndexes[idx + 1] == GRID_EMPTY_INDEX)
idxtoswap = idx + 1;
// top
if (idxtoswap < 0 && row > 0)
if (mIndexes[idx - GRID_SIZE] == GRID_EMPTY_INDEX)
idxtoswap = idx - GRID_SIZE;
// bottom
if (idxtoswap < 0 && row < GRID_SIZE - 1)
if (mIndexes[idx + GRID_SIZE] == GRID_EMPTY_INDEX)
idxtoswap = idx + GRID_SIZE;
// swap
if (idxtoswap >= 0) {
synchronized (this) {
int touched = mIndexes[idx];
mIndexes[idx] = mIndexes[idxtoswap];
mIndexes[idxtoswap] = touched;
}
}
}
private void drawGrid(int cols, int rows, Mat drawMat) {
for (int i = 1; i < GRID_SIZE; i++) {
Core.line(drawMat, new Point(0, i * rows / GRID_SIZE), new Point(cols, i * rows / GRID_SIZE), new Scalar(0, 255, 0, 255), 3);
Core.line(drawMat, new Point(i * cols / GRID_SIZE, 0), new Point(i * cols / GRID_SIZE, rows), new Scalar(0, 255, 0, 255), 3);
}
}
private static void shuffle(int[] array) {
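// Classic Fisher-Yates shuffle: each element is swapped with a uniformly
// random element at or before its own position.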
for (int i = array.length; i > 1; i--) {
int temp = array[i - 1];
int randIx = (int) (Math.random() * i);
array[i - 1] = array[randIx];
array[randIx] = temp;
}
}
private boolean isPuzzleSolvable() {
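/* For a 4x4 puzzle a permutation is solvable iff the number of inversions plus
the 1-based row (from the top) of the empty tile is even; the loop below
accumulates exactly that sum */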
int sum = 0;
for (int i = 0; i < GRID_AREA; i++) {
if (mIndexes[i] == GRID_EMPTY_INDEX)
sum += (i / GRID_SIZE) + 1;
else {
int smaller = 0;
for (int j = i + 1; j < GRID_AREA; j++) {
if (mIndexes[j] < mIndexes[i])
smaller++;
}
sum += smaller;
}
}
return sum % 2 == 0;
}
}

View File

@ -1,117 +0,0 @@
package org.opencv.samples.puzzle15;
import java.util.List;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
public SampleCvViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Started processing thread");
while (true) {
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null)
break;
if (!mCamera.grab()) {
Log.e(TAG, "mCamera.grab() failed");
break;
}
bmp = processFrame(mCamera);
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
Log.i(TAG, "Finished processing thread");
}
}

View File

@ -1,123 +0,0 @@
package org.opencv.samples.puzzle15;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
/** Activity class implements LoaderCallbackInterface to handle OpenCV initialization status **/
public class puzzle15Activity extends Activity {
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemNewGame;
private MenuItem mItemToggleNumbers;
private puzzle15View mView = null;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
public puzzle15Activity() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
Log.i(TAG, "called onCreate");
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemNewGame = menu.add("Start new game");
mItemToggleNumbers = menu.add("Show/hide tile numbers");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemNewGame)
mView.startNewGame();
else if (item == mItemToggleNumbers)
mView.tolggleTileNumbers();
return true;
}
}

View File

@ -1,250 +0,0 @@
package org.opencv.samples.puzzle15;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.View.OnTouchListener;
public class puzzle15View extends SampleCvViewBase implements OnTouchListener {
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private Mat mRgba15;
private Mat[] mCells;
private Mat[] mCells15;
private int[] mIndexses;
private int[] mTextWidths;
private int[] mTextHeights;
private boolean mShowTileNumbers = true;
int gridSize = 4;
int gridArea = gridSize * gridSize;
int gridEmptyIdx = gridArea - 1;
public puzzle15View(Context context) {
super(context);
setOnTouchListener(this);
mTextWidths = new int[gridArea];
mTextHeights = new int[gridArea];
for (int i = 0; i < gridArea; i++) {
Size s = Core.getTextSize(Integer.toString(i + 1), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, 2, null);
mTextHeights[i] = (int) s.height;
mTextWidths[i] = (int) s.width;
}
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
}
super.surfaceCreated(holder);
}
public static void shuffle(int[] array) {
for (int i = array.length; i > 1; i--) {
int temp = array[i - 1];
int randIx = (int) (Math.random() * i);
array[i - 1] = array[randIx];
array[randIx] = temp;
}
}
public boolean isPuzzleSolvable() {
if (gridSize != 4)
return true;
int sum = 0;
for (int i = 0; i < gridArea; i++) {
if (mIndexses[i] == gridEmptyIdx)
sum += (i / gridSize) + 1;
else {
int smaller = 0;
for (int j = i + 1; j < gridArea; j++) {
if (mIndexses[j] < mIndexses[i])
smaller++;
}
sum += smaller;
}
}
return sum % 2 == 0;
}
private void createPuzzle(int cols, int rows, int type) {
mCells = new Mat[gridArea];
mCells15 = new Mat[gridArea];
mRgba15 = new Mat(rows, cols, type);
mIndexses = new int[gridArea];
for (int i = 0; i < gridSize; i++) {
for (int j = 0; j < gridSize; j++) {
int k = i * gridSize + j;
mIndexses[k] = k;
mCells[k] = mRgba.submat(i * rows / gridSize, (i + 1) * rows / gridSize, j * cols / gridSize, (j + 1) * cols / gridSize);
mCells15[k] = mRgba15.submat(i * rows / gridSize, (i + 1) * rows / gridSize, j * cols / gridSize, (j + 1) * cols / gridSize);
}
}
startNewGame();
}
private void drawGrid(int cols, int rows) {
for (int i = 1; i < gridSize; i++) {
Core.line(mRgba15, new Point(0, i * rows / gridSize), new Point(cols, i * rows / gridSize), new Scalar(0, 255, 0, 255), 3);
Core.line(mRgba15, new Point(i * cols / gridSize, 0), new Point(i * cols / gridSize, rows), new Scalar(0, 255, 0, 255), 3);
}
}
public synchronized void startNewGame() {
do {
shuffle(mIndexses);
} while (!isPuzzleSolvable());
}
public void toggleTileNumbers() {
mShowTileNumbers = !mShowTileNumbers;
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
int cols = mRgba.cols();
int rows = mRgba.rows();
rows = rows - rows%4;
cols = cols - cols%4;
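// Truncate the frame to a multiple of the grid size (4) so it divides into
// equally sized integer tiles.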
if (mCells == null)
createPuzzle(cols, rows, mRgba.type());
else if(mRgba15.cols() != cols || mRgba15.rows() != rows) {
releaseMats();
createPuzzle(cols, rows, mRgba.type());
}
// copy shuffled tiles
for (int i = 0; i < gridArea; i++) {
int idx = mIndexses[i];
if (idx == gridEmptyIdx)
mCells15[i].setTo(new Scalar(0x33, 0x33, 0x33, 0xFF));
else {
mCells[idx].copyTo(mCells15[i]);
if (mShowTileNumbers) {
Core.putText(mCells15[i], Integer.toString(1 + idx), new Point((cols / gridSize - mTextWidths[idx]) / 2,
(rows / gridSize + mTextHeights[idx]) / 2), 3/* CV_FONT_HERSHEY_COMPLEX */, 1, new Scalar(255, 0, 0, 255), 2);
}
}
}
drawGrid(cols, rows);
Bitmap bmp = Bitmap.createBitmap(cols, rows, Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba15, bmp);
return bmp;
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
}
@Override
public void run() {
super.run();
synchronized (this) {
releaseMats();
if (mRgba != null)
mRgba.release();
mRgba = null;
}
}
private void releaseMats() {
// Explicitly deallocate Mats
if (mCells != null) {
for (Mat m : mCells)
m.release();
}
if (mCells15 != null) {
for (Mat m : mCells15)
m.release();
}
if (mRgba15 != null)
mRgba15.release();
mRgba15 = null;
mCells = null;
mCells15 = null;
mIndexses = null;
}
public boolean onTouch(View v, MotionEvent event) {
if(mRgba==null) return false;
int cols = mRgba.cols();
int rows = mRgba.rows();
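// The processed bitmap is drawn centered in the view, so subtract the
// letterbox offsets before mapping the touch point onto the grid.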
float xoffset = (getWidth() - cols) / 2;
float yoffset = (getHeight() - rows) / 2;
float x = event.getX() - xoffset;
float y = event.getY() - yoffset;
int row = (int) Math.floor(y * gridSize / rows);
int col = (int) Math.floor(x * gridSize / cols);
if (row < 0 || row >= gridSize || col < 0 || col >= gridSize)
return false;
int idx = row * gridSize + col;
int idxtoswap = -1;
// left
if (idxtoswap < 0 && col > 0)
if (mIndexses[idx - 1] == gridEmptyIdx)
idxtoswap = idx - 1;
// right
if (idxtoswap < 0 && col < gridSize - 1)
if (mIndexses[idx + 1] == gridEmptyIdx)
idxtoswap = idx + 1;
// top
if (idxtoswap < 0 && row > 0)
if (mIndexses[idx - gridSize] == gridEmptyIdx)
idxtoswap = idx - gridSize;
// bottom
if (idxtoswap < 0 && row < gridSize - 1)
if (mIndexses[idx + gridSize] == gridEmptyIdx)
idxtoswap = idx + gridSize;
// swap
if (idxtoswap >= 0) {
synchronized (this) {
int touched = mIndexses[idx];
mIndexses[idx] = mIndexses[idxtoswap];
mIndexses[idxtoswap] = touched;
}
}
return false;// don't need subsequent touch events
}
}

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/color_blob_detection_activity_surface_view" />
</LinearLayout>

View File

@@ -1,62 +1,53 @@
package org.opencv.samples.colorblobdetect;
import java.util.List;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.view.View.OnTouchListener;
public class ColorBlobDetectionActivity extends Activity {
private static final String TAG = "OCVSample::Activity";
public class ColorBlobDetectionActivity extends Activity implements OnTouchListener, CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
private ColorBlobDetectionView mView;
private boolean mIsColorSelected = false;
private Mat mRgba;
private Scalar mBlobColorRgba;
private Scalar mBlobColorHsv;
private ColorBlobDetector mDetector;
private Mat mSpectrum;
private Size SPECTRUM_SIZE;
private Scalar CONTOUR_COLOR;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
private JavaCameraView mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new ColorBlobDetectionView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
mOpenCvCameraView.enableView();
mOpenCvCameraView.setOnTouchListener(ColorBlobDetectionActivity.this);
} break;
default:
{
@@ -70,25 +61,6 @@ public class ColorBlobDetectionActivity extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -96,5 +68,117 @@ public class ColorBlobDetectionActivity extends Activity {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.color_blob_detection_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.color_blob_detection_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mDetector = new ColorBlobDetector();
mSpectrum = new Mat();
mBlobColorRgba = new Scalar(255);
mBlobColorHsv = new Scalar(255);
SPECTRUM_SIZE = new Size(200, 32);
CONTOUR_COLOR = new Scalar(255,0,0,255);
}
public void onCameraViewStopped() {
mRgba.release();
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
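// Sample a small patch (roughly 8x8 px) around the touch point, clamped to
// the frame borders, so a single noisy pixel does not define the color.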
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
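// (For reference, Core.mean(touchedRegionHsv) would compute the same
// per-channel average in a single call.)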
mBlobColorRgba = convertScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
return false; // don't need subsequent touch events
}
public Mat onCameraFrame(Mat inputFrame) {
inputFrame.copyTo(mRgba);
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
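// Paint a 32x32 swatch of the selected color and the detector's spectrum
// into the top-left corner through submat views of the output frame.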
Mat colorLabel = mRgba.submat(2, 34, 2, 34);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(2, 2 + mSpectrum.rows(), 38, 38 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
return mRgba;
}
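// cvtColor() operates on Mats rather than Scalars, so the helper below packs
// the scalar into a 1x1 Mat, converts that single pixel, and reads it back.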
private Scalar convertScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
}

View File

@@ -1,151 +0,0 @@
package org.opencv.samples.colorblobdetect;
import java.util.List;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.imgproc.Imgproc;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.View.OnTouchListener;
public class ColorBlobDetectionView extends SampleCvViewBase implements OnTouchListener {
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private boolean mIsColorSelected = false;
private Scalar mBlobColorRgba = new Scalar(255);
private Scalar mBlobColorHsv = new Scalar(255);
private ColorBlobDetector mDetector = new ColorBlobDetector();
private Mat mSpectrum = new Mat();
private static Size SPECTRUM_SIZE = new Size(200, 32);
private static final Scalar CONTOUR_COLOR = new Scalar(255,0,0,255);
public ColorBlobDetectionView(Context context) {
super(context);
setOnTouchListener(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mat before usage
mRgba = new Mat();
}
super.surfaceCreated(holder);
}
private Scalar convertScalarHsv2Rgba(Scalar hsvColor) {
Mat pointMatRgba = new Mat();
Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
return new Scalar(pointMatRgba.get(0, 0));
}
public boolean onTouch(View v, MotionEvent event) {
int cols = mRgba.cols();
int rows = mRgba.rows();
int xOffset = (getWidth() - cols) / 2;
int yOffset = (getHeight() - rows) / 2;
int x = (int)event.getX() - xOffset;
int y = (int)event.getY() - yOffset;
Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");
if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;
Rect touchedRect = new Rect();
touchedRect.x = (x>4) ? x-4 : 0;
touchedRect.y = (y>4) ? y-4 : 0;
touchedRect.width = (x+4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
touchedRect.height = (y+4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;
Mat touchedRegionRgba = mRgba.submat(touchedRect);
Mat touchedRegionHsv = new Mat();
Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);
// Calculate average color of touched region
mBlobColorHsv = Core.sumElems(touchedRegionHsv);
int pointCount = touchedRect.width*touchedRect.height;
for (int i = 0; i < mBlobColorHsv.val.length; i++)
mBlobColorHsv.val[i] /= pointCount;
mBlobColorRgba = convertScalarHsv2Rgba(mBlobColorHsv);
Log.i(TAG, "Touched rgba color: (" + mBlobColorRgba.val[0] + ", " + mBlobColorRgba.val[1] +
", " + mBlobColorRgba.val[2] + ", " + mBlobColorRgba.val[3] + ")");
mDetector.setHsvColor(mBlobColorHsv);
Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);
mIsColorSelected = true;
return false; // don't need subsequent touch events
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
if (mIsColorSelected) {
mDetector.process(mRgba);
List<MatOfPoint> contours = mDetector.getContours();
Log.e(TAG, "Contours count: " + contours.size());
Core.drawContours(mRgba, contours, -1, CONTOUR_COLOR);
Mat colorLabel = mRgba.submat(2, 34, 2, 34);
colorLabel.setTo(mBlobColorRgba);
Mat spectrumLabel = mRgba.submat(2, 2 + mSpectrum.rows(), 38, 38 + mSpectrum.cols());
mSpectrum.copyTo(spectrumLabel);
}
try {
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}
return bmp;
}
@Override
public void run() {
super.run();
synchronized (this) {
// Explicitly deallocate Mats
if (mRgba != null)
mRgba.release();
mRgba = null;
}
}
}

View File

@@ -1,117 +0,0 @@
package org.opencv.samples.colorblobdetect;
import java.util.List;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
public SampleCvViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Started processing thread");
while (true) {
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null)
break;
if (!mCamera.grab()) {
Log.e(TAG, "mCamera.grab() failed");
break;
}
bmp = processFrame(mCamera);
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
Log.i(TAG, "Finished processing thread");
}
}

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/fd_activity_surface_view" />
</LinearLayout>

View File

@@ -1,12 +1,26 @@
package org.opencv.samples.fd;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Context;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@@ -14,19 +28,34 @@ import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class FdActivity extends Activity {
private static final String TAG = "OCVSample::Activity";
public class FdActivity extends Activity implements CvCameraViewListener {
private MenuItem mItemFace50;
private MenuItem mItemFace40;
private MenuItem mItemFace30;
private MenuItem mItemFace20;
private MenuItem mItemType;
private int mDetectorType = 0;
private String[] mDetectorName;
private FdView mView;
private static final String TAG = "OCVSample::Activity";
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
private MenuItem mItemFace50;
private MenuItem mItemFace40;
private MenuItem mItemFace30;
private MenuItem mItemFace20;
private MenuItem mItemType;
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private int mDetectorType = JAVA_DETECTOR;
private String[] mDetectorName;
private float mRelativeFaceSize = 0;
private int mAbsoluteFaceSize = 0;
private JavaCameraView mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
@@ -34,44 +63,41 @@ public class FdActivity extends Activity {
{
Log.i(TAG, "OpenCV loaded successfully");
// Load native libs after OpenCV initialization
// Load native library after(!) OpenCV initialization
System.loadLibrary("detection_based_tracker");
// Create and set View
mView = new FdView(mAppContext);
mView.setDetectorType(mDetectorType);
mView.setMinFaceSize(0.2f);
setContentView(mView);
try {
// load cascade file from application resources
InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
});
MarketErrorMessage.show();
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
mOpenCvCameraView.enableView();
} break;
default:
{
@@ -83,30 +109,12 @@ public class FdActivity extends Activity {
public FdActivity() {
mDetectorName = new String[2];
mDetectorName[FdView.JAVA_DETECTOR] = "Java";
mDetectorName[FdView.NATIVE_DETECTOR] = "Native (tracking)";
mDetectorName[JAVA_DETECTOR] = "Java";
mDetectorName[NATIVE_DETECTOR] = "Native (tracking)";
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -114,6 +122,75 @@ public class FdActivity extends Activity {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.face_detect_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.fd_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
}
public void onCameraViewStopped() {
mGray.release();
mRgba.release();
}
public Mat onCameraFrame(Mat inputFrame) {
inputFrame.copyTo(mRgba);
Imgproc.cvtColor(inputFrame, mGray, Imgproc.COLOR_RGBA2GRAY);
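// The minimum face size is configured as a fraction of the frame height
// (e.g. 0.2 skips faces smaller than 20% of the height) and is converted to
// pixels lazily on the first processed frame.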
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
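// detectMultiScale arguments: 1.1 is the image pyramid scale step and 2 the
// minimum neighbor count; these are the stock sample values, trading some
// accuracy for speed.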
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
}
else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++)
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
return mRgba;
}
@Override
@@ -131,18 +208,37 @@ public class FdActivity extends Activity {
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemFace50)
mView.setMinFaceSize(0.5f);
setMinFaceSize(0.5f);
else if (item == mItemFace40)
mView.setMinFaceSize(0.4f);
setMinFaceSize(0.4f);
else if (item == mItemFace30)
mView.setMinFaceSize(0.3f);
setMinFaceSize(0.3f);
else if (item == mItemFace20)
mView.setMinFaceSize(0.2f);
setMinFaceSize(0.2f);
else if (item == mItemType) {
mDetectorType = (mDetectorType + 1) % mDetectorName.length;
item.setTitle(mDetectorName[mDetectorType]);
mView.setDetectorType(mDetectorType);
setDetectorType(mDetectorType);
}
return true;
}
private void setMinFaceSize(float faceSize) {
mRelativeFaceSize = faceSize;
mAbsoluteFaceSize = 0;
}
private void setDetectorType(int type) {
if (mDetectorType != type) {
mDetectorType = type;
if (type == NATIVE_DETECTOR) {
Log.i(TAG, "Detection Based Tracker enabled");
mNativeDetector.start();
} else {
Log.i(TAG, "Cascade detector enabled");
mNativeDetector.stop();
}
}
}
}

View File

@@ -1,175 +0,0 @@
package org.opencv.samples.fd;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.objdetect.CascadeClassifier;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;
class FdView extends SampleCvViewBase {
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private Mat mGray;
private File mCascadeFile;
private CascadeClassifier mJavaDetector;
private DetectionBasedTracker mNativeDetector;
private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);
public static final int JAVA_DETECTOR = 0;
public static final int NATIVE_DETECTOR = 1;
private int mDetectorType = JAVA_DETECTOR;
private float mRelativeFaceSize = 0;
private int mAbsoluteFaceSize = 0;
public void setMinFaceSize(float faceSize) {
mRelativeFaceSize = faceSize;
mAbsoluteFaceSize = 0;
}
public void setDetectorType(int type) {
if (mDetectorType != type) {
mDetectorType = type;
if (type == NATIVE_DETECTOR) {
Log.i(TAG, "Detection Based Tracker enabled");
mNativeDetector.start();
} else {
Log.i(TAG, "Cascade detector enabled");
mNativeDetector.stop();
}
}
}
public FdView(Context context) {
super(context);
try {
// load cascade file from application resources
InputStream is = context.getResources().openRawResource(R.raw.lbpcascade_frontalface);
File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
FileOutputStream os = new FileOutputStream(mCascadeFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
if (mJavaDetector.empty()) {
Log.e(TAG, "Failed to load cascade classifier");
mJavaDetector = null;
} else
Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);
cascadeDir.delete();
} catch (IOException e) {
e.printStackTrace();
Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
}
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
mRgba = new Mat();
}
super.surfaceCreated(holder);
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if (mAbsoluteFaceSize == 0) {
int height = mGray.rows();
if (Math.round(height * mRelativeFaceSize) > 0) {
mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
}
mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
}
MatOfRect faces = new MatOfRect();
if (mDetectorType == JAVA_DETECTOR) {
if (mJavaDetector != null)
mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
}
else if (mDetectorType == NATIVE_DETECTOR) {
if (mNativeDetector != null)
mNativeDetector.detect(mGray, faces);
}
else {
Log.e(TAG, "Detection method is not selected!");
}
Rect[] facesArray = faces.toArray();
for (int i = 0; i < facesArray.length; i++)
Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}
return bmp;
}
@Override
public void run() {
super.run();
synchronized (this) {
// Explicitly deallocate Mats
if (mRgba != null)
mRgba.release();
if (mGray != null)
mGray.release();
if (mCascadeFile != null)
mCascadeFile.delete();
if (mNativeDetector != null)
mNativeDetector.release();
mRgba = null;
mGray = null;
mCascadeFile = null;
}
}
}

View File

@@ -1,123 +0,0 @@
package org.opencv.samples.fd;
import java.util.List;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
private FpsMeter mFps;
public SampleCvViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
mFps = new FpsMeter();
Log.i(TAG, "Instantiated new " + this.getClass());
}
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Started processing thread");
mFps.init();
while (true) {
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null)
break;
if (!mCamera.grab()) {
Log.e(TAG, "mCamera.grab() failed");
break;
}
bmp = processFrame(mCamera);
mFps.measure();
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mFps.draw(canvas, (canvas.getWidth() - bmp.getWidth()) / 2, 0);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
Log.i(TAG, "Finished processing thread");
}
}

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/image_manipulations_activity_surface_view" />
</LinearLayout>

View File

@@ -1,12 +1,23 @@
package org.opencv.samples.imagemanipulations;
import java.util.Arrays;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@@ -14,70 +25,64 @@ import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class ImageManipulationsActivity extends Activity {
private static final String TAG = "OCVSample::Activity";
public class ImageManipulationsActivity extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HIST = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_SEPIA = 3;
public static final int VIEW_MODE_SOBEL = 4;
public static final int VIEW_MODE_ZOOM = 5;
public static final int VIEW_MODE_PIXELIZE = 6;
public static final int VIEW_MODE_POSTERIZE = 7;
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_HIST = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_SEPIA = 3;
public static final int VIEW_MODE_SOBEL = 4;
public static final int VIEW_MODE_ZOOM = 5;
public static final int VIEW_MODE_PIXELIZE = 6;
public static final int VIEW_MODE_POSTERIZE = 7;
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewHist;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewSepia;
private MenuItem mItemPreviewSobel;
private MenuItem mItemPreviewZoom;
private MenuItem mItemPreviewPixelize;
private MenuItem mItemPreviewPosterize;
private ImageManipulationsView mView;
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewHist;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewSepia;
private MenuItem mItemPreviewSobel;
private MenuItem mItemPreviewZoom;
private MenuItem mItemPreviewPixelize;
private MenuItem mItemPreviewPosterize;
private JavaCameraView mOpenCvCameraView;
private Size mSize0;
private Size mSizeRgba;
private Size mSizeRgbaInner;
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
private Mat mHist;
private Mat mMat0;
private MatOfInt mChannels[];
private MatOfInt mHistSize;
private int mHistSizeNum;
private MatOfFloat mRanges;
private Scalar mColorsRGB[];
private Scalar mColorsHue[];
private Scalar mWhite;
private Point mP1;
private Point mP2;
private float mBuff[];
private Mat mRgbaInnerWindow;
private Mat mGrayInnerWindow;
private Mat mBlurWindow;
private Mat mZoomWindow;
private Mat mZoomCorner;
private Mat mSepiaKernel;
public static int viewMode = VIEW_MODE_RGBA;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new ImageManipulationsView(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
mOpenCvCameraView.enableView();
} break;
default:
{
@@ -91,25 +96,6 @@ public class ImageManipulationsActivity extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -117,6 +103,30 @@ public class ImageManipulationsActivity extends Activity {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.image_manipulations_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.image_manipulations_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
@Override
@@ -154,4 +164,203 @@ public class ImageManipulationsActivity extends Activity {
viewMode = VIEW_MODE_POSTERIZE;
return true;
}
public void onCameraViewStarted(int width, int height) {
mGray = new Mat();
mRgba = new Mat();
mIntermediateMat = new Mat();
mSize0 = new Size();
mHist = new Mat();
mChannels = new MatOfInt[] { new MatOfInt(0), new MatOfInt(1), new MatOfInt(2) };
mHistSizeNum = 25;
mBuff = new float[mHistSizeNum];
mHistSize = new MatOfInt(mHistSizeNum);
mRanges = new MatOfFloat(0f, 256f);
mMat0 = new Mat();
mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
mColorsHue = new Scalar[] {
new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
};
mWhite = Scalar.all(255);
mP1 = new Point();
mP2 = new Point();
// Fill sepia kernel
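// Core.transform() multiplies every RGBA pixel, taken as a 4-vector, by this
// matrix, so row i defines output channel i. Note the weights are stored in
// the reverse of the classic sepia order (0.393, 0.769, 0.189 for red).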
mSepiaKernel = new Mat(4, 4, CvType.CV_32F);
mSepiaKernel.put(0, 0, /* R */0.189f, 0.769f, 0.393f, 0f);
mSepiaKernel.put(1, 0, /* G */0.168f, 0.686f, 0.349f, 0f);
mSepiaKernel.put(2, 0, /* B */0.131f, 0.534f, 0.272f, 0f);
mSepiaKernel.put(3, 0, /* A */0.000f, 0.000f, 0.000f, 1f);
}
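// The "window" Mats created below are submats of mRgba/mGray and share their
// pixel buffers, so filtering into a window edits the displayed frame in place.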
private void CreateAuxiliaryMats() {
if (mRgba.empty())
return;
mSizeRgba = mRgba.size();
int rows = (int) mSizeRgba.height;
int cols = (int) mSizeRgba.width;
int left = cols / 8;
int top = rows / 8;
int width = cols * 3 / 4;
int height = rows * 3 / 4;
if (mRgbaInnerWindow == null)
mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
mSizeRgbaInner = mRgbaInnerWindow.size();
if (mGrayInnerWindow == null && !mGray.empty())
mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);
if (mBlurWindow == null)
mBlurWindow = mRgba.submat(0, rows, cols / 3, cols * 2 / 3);
if (mZoomCorner == null)
mZoomCorner = mRgba.submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
if (mZoomWindow == null)
mZoomWindow = mRgba.submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
}
public void onCameraViewStopped() {
// Explicitly deallocate Mats
if (mZoomWindow != null)
mZoomWindow.release();
if (mZoomCorner != null)
mZoomCorner.release();
if (mBlurWindow != null)
mBlurWindow.release();
if (mGrayInnerWindow != null)
mGrayInnerWindow.release();
if (mRgbaInnerWindow != null)
mRgbaInnerWindow.release();
if (mRgba != null)
mRgba.release();
if (mGray != null)
mGray.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mRgba = null;
mGray = null;
mIntermediateMat = null;
mRgbaInnerWindow = null;
mGrayInnerWindow = null;
mBlurWindow = null;
mZoomCorner = null;
mZoomWindow = null;
}
public Mat onCameraFrame(Mat inputFrame) {
inputFrame.copyTo(mRgba);
switch (ImageManipulationsActivity.viewMode) {
case ImageManipulationsActivity.VIEW_MODE_RGBA:
break;
case ImageManipulationsActivity.VIEW_MODE_HIST:
if ((mSizeRgba == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
// Bar layout: five groups (R, G, B, Value, Hue) of mHistSizeNum bars each,
// separated by 10-bar gaps and centered horizontally via the offset below.
int thickness = (int) (mSizeRgba.width / (mHistSizeNum + 10) / 5);
if(thickness > 5) thickness = 5;
int offset = (int) ((mSizeRgba.width - (5*mHistSizeNum + 4*10)*thickness)/2);
// RGB
for(int c=0; c<3; c++) {
Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thickness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsRGB[c], thickness);
}
}
// Value and Hue
Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
// Value
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thickness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mWhite, thickness);
}
// Hue
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thickness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsHue[h], thickness);
}
break;
case ImageManipulationsActivity.VIEW_MODE_CANNY:
if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case ImageManipulationsActivity.VIEW_MODE_SOBEL:
Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);
if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case ImageManipulationsActivity.VIEW_MODE_SEPIA:
Core.transform(mRgba, mRgba, mSepiaKernel);
break;
case ImageManipulationsActivity.VIEW_MODE_ZOOM:
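// Zoom: magnify the small central window into the top-left corner and
// outline the magnified source region in red.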
if ((mZoomCorner == null) || (mZoomWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());
Size wsize = mZoomWindow.size();
Core.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
break;
case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
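// Pixelize: shrink the inner window to a tenth of its size and scale it back
// up with nearest-neighbor interpolation, which keeps hard block edges.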
if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.resize(mRgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
Imgproc.resize(mIntermediateMat, mRgbaInnerWindow, mSizeRgbaInner, 0., 0., Imgproc.INTER_NEAREST);
break;
case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
/*
Imgproc.cvtColor(mRgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
*/
Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
mRgbaInnerWindow.setTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
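// Dividing by 16 and multiplying back by 16 (with 8-bit saturation) snaps
// each channel to one of 16 levels; the Canny mask above first paints the
// detected edges black, giving the posterized look.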
Core.convertScaleAbs(mRgbaInnerWindow, mIntermediateMat, 1./16, 0);
Core.convertScaleAbs(mIntermediateMat, mRgbaInnerWindow, 16, 0);
break;
}
return mRgba;
}
}

View File

@@ -1,283 +0,0 @@
package org.opencv.samples.imagemanipulations;
import java.util.Arrays;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfFloat;
import org.opencv.core.MatOfInt;
import org.opencv.core.Size;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.CvType;
import org.opencv.imgproc.Imgproc;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;
class ImageManipulationsView extends SampleCvViewBase {
private static final String TAG = "OCVSample::View";
private Size mSize0;
private Size mSizeRgba;
private Size mSizeRgbaInner;
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
private Mat mHist;
private Mat mMat0;
private MatOfInt mChannels[];
private MatOfInt mHistSize;
private int mHistSizeNum;
private MatOfFloat mRanges;
private Scalar mColorsRGB[];
private Scalar mColorsHue[];
private Scalar mWhite;
private Point mP1;
private Point mP2;
private float mBuff[];
private Mat mRgbaInnerWindow;
private Mat mGrayInnerWindow;
private Mat mBlurWindow;
private Mat mZoomWindow;
private Mat mZoomCorner;
private Mat mSepiaKernel;
public ImageManipulationsView(Context context) {
super(context);
mSepiaKernel = new Mat(4, 4, CvType.CV_32F);
mSepiaKernel.put(0, 0, /* R */0.189f, 0.769f, 0.393f, 0f);
mSepiaKernel.put(1, 0, /* G */0.168f, 0.686f, 0.349f, 0f);
mSepiaKernel.put(2, 0, /* B */0.131f, 0.534f, 0.272f, 0f);
mSepiaKernel.put(3, 0, /* A */0.000f, 0.000f, 0.000f, 1f);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
mRgba = new Mat();
mIntermediateMat = new Mat();
mSize0 = new Size();
mHist = new Mat();
mChannels = new MatOfInt[] { new MatOfInt(0), new MatOfInt(1), new MatOfInt(2) };
mHistSizeNum = 25;
mBuff = new float[mHistSizeNum];
mHistSize = new MatOfInt(mHistSizeNum);
mRanges = new MatOfFloat(0f, 256f);
mMat0 = new Mat();
mColorsRGB = new Scalar[] { new Scalar(200, 0, 0, 255), new Scalar(0, 200, 0, 255), new Scalar(0, 0, 200, 255) };
mColorsHue = new Scalar[] {
new Scalar(255, 0, 0, 255), new Scalar(255, 60, 0, 255), new Scalar(255, 120, 0, 255), new Scalar(255, 180, 0, 255), new Scalar(255, 240, 0, 255),
new Scalar(215, 213, 0, 255), new Scalar(150, 255, 0, 255), new Scalar(85, 255, 0, 255), new Scalar(20, 255, 0, 255), new Scalar(0, 255, 30, 255),
new Scalar(0, 255, 85, 255), new Scalar(0, 255, 150, 255), new Scalar(0, 255, 215, 255), new Scalar(0, 234, 255, 255), new Scalar(0, 170, 255, 255),
new Scalar(0, 120, 255, 255), new Scalar(0, 60, 255, 255), new Scalar(0, 0, 255, 255), new Scalar(64, 0, 255, 255), new Scalar(120, 0, 255, 255),
new Scalar(180, 0, 255, 255), new Scalar(255, 0, 255, 255), new Scalar(255, 0, 215, 255), new Scalar(255, 0, 85, 255), new Scalar(255, 0, 0, 255)
};
mWhite = Scalar.all(255);
mP1 = new Point();
mP2 = new Point();
}
super.surfaceCreated(holder);
}
private void CreateAuxiliaryMats() {
if (mRgba.empty())
return;
mSizeRgba = mRgba.size();
int rows = (int) mSizeRgba.height;
int cols = (int) mSizeRgba.width;
int left = cols / 8;
int top = rows / 8;
int width = cols * 3 / 4;
int height = rows * 3 / 4;
if (mRgbaInnerWindow == null)
mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
mSizeRgbaInner = mRgbaInnerWindow.size();
if (mGrayInnerWindow == null && !mGray.empty())
mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);
if (mBlurWindow == null)
mBlurWindow = mRgba.submat(0, rows, cols / 3, cols * 2 / 3);
if (mZoomCorner == null)
mZoomCorner = mRgba.submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);
if (mZoomWindow == null)
mZoomWindow = mRgba.submat(rows / 2 - 9 * rows / 100, rows / 2 + 9 * rows / 100, cols / 2 - 9 * cols / 100, cols / 2 + 9 * cols / 100);
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
switch (ImageManipulationsActivity.viewMode) {
case ImageManipulationsActivity.VIEW_MODE_RGBA:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
break;
case ImageManipulationsActivity.VIEW_MODE_HIST:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if ((mSizeRgba == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
int thickness = (int) (mSizeRgba.width / (mHistSizeNum + 10) / 5);
if(thickness > 5) thickness = 5;
int offset = (int) ((mSizeRgba.width - (5*mHistSizeNum + 4*10)*thickness)/2);
// RGB
for(int c=0; c<3; c++) {
Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thickness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsRGB[c], thickness);
}
}
// Value and Hue
Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
// Value
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thickness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mWhite, thickness);
}
// Hue
Imgproc.calcHist(Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
Core.normalize(mHist, mHist, mSizeRgba.height/2, 0, Core.NORM_INF);
mHist.get(0, 0, mBuff);
for(int h=0; h<mHistSizeNum; h++) {
mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thickness;
mP1.y = mSizeRgba.height-1;
mP2.y = mP1.y - 2 - (int)mBuff[h];
Core.line(mRgba, mP1, mP2, mColorsHue[h], thickness);
}
break;
case ImageManipulationsActivity.VIEW_MODE_CANNY:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case ImageManipulationsActivity.VIEW_MODE_SOBEL:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case ImageManipulationsActivity.VIEW_MODE_SEPIA:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Core.transform(mRgba, mRgba, mSepiaKernel);
break;
case ImageManipulationsActivity.VIEW_MODE_ZOOM:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if ((mZoomCorner == null) || (mZoomWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());
Size wsize = mZoomWindow.size();
Core.rectangle(mZoomWindow, new Point(1, 1), new Point(wsize.width - 2, wsize.height - 2), new Scalar(255, 0, 0, 255), 2);
break;
case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
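// Pixelize: shrink the inner window to 10% with nearest-neighbor interpolation,
// then blow it back up to full size, leaving visible square blocks.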
Imgproc.resize(mRgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
Imgproc.resize(mIntermediateMat, mRgbaInnerWindow, mSizeRgbaInner, 0., 0., Imgproc.INTER_NEAREST);
break;
case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
/*
Imgproc.cvtColor(mRgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
*/
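// Posterize: paint Canny edges black, then quantize each channel by scaling
// the pixel values down by 16 and back up (roughly 16 levels per channel).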
Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
mRgbaInnerWindow.setTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
Core.convertScaleAbs(mRgbaInnerWindow, mIntermediateMat, 1./16, 0);
Core.convertScaleAbs(mIntermediateMat, mRgbaInnerWindow, 16, 0);
break;
}
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba, bmp);
return bmp;
} catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
}
@Override
public void run() {
super.run();
synchronized (this) {
// Explicitly deallocate Mats
if (mZoomWindow != null)
mZoomWindow.release();
if (mZoomCorner != null)
mZoomCorner.release();
if (mBlurWindow != null)
mBlurWindow.release();
if (mGrayInnerWindow != null)
mGrayInnerWindow.release();
if (mRgbaInnerWindow != null)
mRgbaInnerWindow.release();
if (mRgba != null)
mRgba.release();
if (mGray != null)
mGray.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mRgba = null;
mGray = null;
mIntermediateMat = null;
mRgbaInnerWindow = null;
mGrayInnerWindow = null;
mBlurWindow = null;
mZoomCorner = null;
mZoomWindow = null;
}
}
}

View File

@@ -1,124 +0,0 @@
package org.opencv.samples.imagemanipulations;
import java.util.List;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
private FpsMeter mFps;
public SampleCvViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
mFps = new FpsMeter();
Log.i(TAG, "Instantiated new " + this.getClass());
}
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Started processing thread");
mFps.init();
while (true) {
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null)
break;
if (!mCamera.grab()) {
Log.e(TAG, "mCamera.grab() failed");
break;
}
bmp = processFrame(mCamera);
mFps.measure();
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()), null);
mFps.draw(canvas, (canvas.getWidth() - bmp.getWidth()) / 2, 0);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
Log.i(TAG, "Finished processing thread");
}
}

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -5,16 +5,11 @@ import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
public class Sample0Base extends Activity {
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private Sample0View mView;
public Sample0Base() {
@@ -55,22 +50,4 @@ public class Sample0Base extends Activity {
mView = new Sample0View(this);
setContentView(mView);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA)
mView.setViewMode(Sample0View.VIEW_MODE_RGBA);
else if (item == mItemPreviewGray)
mView.setViewMode(Sample0View.VIEW_MODE_GRAY);
return true;
}
}

View File

@@ -3,6 +3,7 @@ package org.opencv.samples.tutorial0;
import java.io.IOException;
import java.util.List;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
@@ -43,7 +44,8 @@ public abstract class SampleViewBase extends SurfaceView implements SurfaceHolde
return mFrameHeight;
}
public void setPreview() throws IOException {
@TargetApi(11)
public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/tutorial1_activity_surface_view" />
</LinearLayout>

View File

@@ -3,65 +3,29 @@ package org.opencv.samples.tutorial1;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.android.JavaCameraView;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class Sample1Java extends Activity {
public class Sample1Java extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private Sample1View mView;
private JavaCameraView mOpenCvCameraView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new Sample1View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
mOpenCvCameraView.enableView();
} break;
default:
{
@@ -75,25 +39,6 @@ public class Sample1Java extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -101,27 +46,39 @@ public class Sample1Java extends Activity {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial1_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.tutorial1_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
Log.i(TAG, "called onCreateOptionsMenu");
mItemPreviewRGBA = menu.add("Preview RGBA");
mItemPreviewGray = menu.add("Preview GRAY");
mItemPreviewCanny = menu.add("Canny");
return true;
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample1View.VIEW_MODE_RGBA);
} else if (item == mItemPreviewGray) {
mView.setViewMode(Sample1View.VIEW_MODE_GRAY);
} else if (item == mItemPreviewCanny) {
mView.setViewMode(Sample1View.VIEW_MODE_CANNY);
}
return true;
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
}
public void onCameraViewStopped() {
}
public Mat onCameraFrame(Mat inputFrame) {
return inputFrame;
}
}

View File

@@ -1,113 +0,0 @@
package org.opencv.samples.tutorial1;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
class Sample1View extends SampleViewBase {
private static final String TAG = "OCVSample::View";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private Bitmap mBitmap;
private int mViewMode;
public Sample1View(Context context) {
super(context);
mViewMode = VIEW_MODE_RGBA;
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
// initialize Mats before usage
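// mYuv holds one NV21 frame (height * 3/2 rows of bytes); its top 'height' rows
// are the luma plane, so mGraySubmat aliases the grayscale image without a copy.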
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
mRgba = new Mat();
mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
@Override
protected void onPreviewStopped() {
Log.i(TAG, "called onPreviewStopped");
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
synchronized (this) {
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
if (mRgba != null)
mRgba.release();
if (mGraySubmat != null)
mGraySubmat.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mYuv = null;
mRgba = null;
mGraySubmat = null;
mIntermediateMat = null;
}
}
@Override
protected Bitmap processFrame(byte[] data) {
mYuv.put(0, 0, data);
final int viewMode = mViewMode;
switch (viewMode) {
case VIEW_MODE_GRAY:
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case VIEW_MODE_RGBA:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3/* CV_FONT_HERSHEY_COMPLEX */, 2, new Scalar(255, 0, 0, 255), 3);
break;
case VIEW_MODE_CANNY:
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
}
Bitmap bmp = mBitmap;
try {
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e("org.opencv.samples.tutorial1", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}
return bmp;
}
public void setViewMode(int viewMode) {
Log.i(TAG, "called setViewMode("+viewMode+")");
mViewMode = viewMode;
}
}

View File

@@ -1,229 +0,0 @@
package org.opencv.samples.tutorial1;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private volatile boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public int getFrameWidth() {
return mFrameWidth;
}
public int getFrameHeight() {
return mFrameHeight;
}
public void setPreview() throws IOException {
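// Honeycomb and newer require a preview surface, so a dummy SurfaceTexture
// (the texture name 10 is arbitrary) keeps preview callbacks coming without
// rendering the preview anywhere.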
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else
mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = null;
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
try {
mCamera = Camera.open(camIdx);
}
catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
}
}
if(mCamera == null) {
Log.e(TAG, "Can't open any camera");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
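// Buffered callback: each frame is copied out under the lock, the processing
// thread is woken, and the single preallocated buffer is returned to the camera.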
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "Releasing Camera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
onPreviewStopped();
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
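/* One preview frame takes width * height * bitsPerPixel / 8 bytes
   (12 bits per pixel for the default NV21 preview format) */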
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
/**
* This method is called when the preview process is being started. It is called before the first frame is delivered and before processFrame is called.
* It receives the width and height of the preview frames and can be used to prepare the data needed during frame processing.
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStarted(int previewWidth, int previewHeight);
/**
* This method is called when the preview is stopped. By the time it is called, the preview has stopped and all frame processing has completed.
* If the Bitmap object returned via processFrame is cached, this is a good time to recycle it.
* Any other resources used during the preview can be released.
*/
protected abstract void onPreviewStopped();
public void run() {
mThreadRun = true;
Log.i(TAG, "Started processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
if (!mThreadRun)
break;
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}
}
Log.i(TAG, "Finished processing thread");
}
}

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.NativeCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/tutorial2_activity_surface_view" />
</LinearLayout>

View File

@@ -3,10 +3,17 @@ package org.opencv.samples.tutorial2;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.android.NativeCameraView;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@@ -14,60 +21,30 @@ import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class Sample2NativeCamera extends Activity {
public class Sample2NativeCamera extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
private static int viewMode = VIEW_MODE_RGBA;
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private Sample2View mView;
private Mat mRgba;
private Mat mIntermediateMat;
public static int viewMode = VIEW_MODE_RGBA;
private NativeCameraView mOpenCvCameraView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new Sample2View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
mOpenCvCameraView.enableView();
} break;
default:
{
@@ -81,25 +58,6 @@ public class Sample2NativeCamera extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -107,6 +65,61 @@ public class Sample2NativeCamera extends Activity {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial2_surface_view);
mOpenCvCameraView = (NativeCameraView)findViewById(R.id.tutorial2_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
}
public void onCameraViewStopped() {
mRgba.release();
mIntermediateMat.release();
}
public Mat onCameraFrame(Mat inputFrame) {
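// The capture format follows the menu selection (see onOptionsItemSelected):
// inputFrame is a single-channel grey Mat in GRAY and CANNY modes and RGBA otherwise.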
switch (Sample2NativeCamera.viewMode) {
case Sample2NativeCamera.VIEW_MODE_GRAY:
{
Imgproc.cvtColor(inputFrame, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
} break;
case Sample2NativeCamera.VIEW_MODE_RGBA:
{
inputFrame.copyTo(mRgba);
Core.putText(mRgba, "OpenCV+Android", new Point(10, 50), 3, 1, new Scalar(255, 0, 0, 255), 2);
} break;
case Sample2NativeCamera.VIEW_MODE_CANNY:
{
Imgproc.Canny(inputFrame, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
} break;
}
return mRgba;
}
@Override
@@ -122,11 +135,21 @@ public class Sample2NativeCamera extends Activity {
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA)
{
mOpenCvCameraView.SetCaptureFormat(Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
viewMode = VIEW_MODE_RGBA;
}
else if (item == mItemPreviewGray)
{
mOpenCvCameraView.SetCaptureFormat(Highgui.CV_CAP_ANDROID_GREY_FRAME);
viewMode = VIEW_MODE_GRAY;
}
else if (item == mItemPreviewCanny)
{
mOpenCvCameraView.SetCaptureFormat(Highgui.CV_CAP_ANDROID_GREY_FRAME);
viewMode = VIEW_MODE_CANNY;
}
return true;
}
}

View File

@@ -1,90 +0,0 @@
package org.opencv.samples.tutorial2;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.imgproc.Imgproc;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;
class Sample2View extends SampleCvViewBase {
private static final String TAG = "OCVSample::View";
private Mat mRgba;
private Mat mGray;
private Mat mIntermediateMat;
public Sample2View(Context context) {
super(context);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
synchronized (this) {
// initialize Mats before usage
mGray = new Mat();
mRgba = new Mat();
mIntermediateMat = new Mat();
}
super.surfaceCreated(holder);
}
@Override
protected Bitmap processFrame(VideoCapture capture) {
switch (Sample2NativeCamera.viewMode) {
case Sample2NativeCamera.VIEW_MODE_GRAY:
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
Imgproc.cvtColor(mGray, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case Sample2NativeCamera.VIEW_MODE_RGBA:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Core.putText(mRgba, "OpenCV+Android", new Point(10, 50), 3, 1, new Scalar(255, 0, 0, 255), 2);
break;
case Sample2NativeCamera.VIEW_MODE_CANNY:
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
}
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba, bmp);
return bmp;
} catch(Exception e) {
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
return null;
}
}
@Override
public void run() {
super.run();
synchronized (this) {
// Explicitly deallocate Mats
if (mRgba != null)
mRgba.release();
if (mGray != null)
mGray.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mRgba = null;
mGray = null;
mIntermediateMat = null;
}
}
}

View File

@@ -1,117 +0,0 @@
package org.opencv.samples.tutorial2;
import java.util.List;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private SurfaceHolder mHolder;
private VideoCapture mCamera;
public SampleCvViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public synchronized boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
releaseCamera();
Log.e(TAG, "Can't open native camera");
return false;
}
return true;
}
public synchronized void releaseCamera() {
Log.i(TAG, "Releasing Camera");
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null && mCamera.isOpened()) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
// selecting optimal camera preview size
{
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
protected abstract Bitmap processFrame(VideoCapture capture);
public void run() {
Log.i(TAG, "Started processing thread");
while (true) {
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null)
break;
if (!mCamera.grab()) {
Log.e(TAG, "mCamera.grab() failed");
break;
}
bmp = processFrame(mCamera);
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
Log.i(TAG, "Finished processing thread");
}
}

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -8,28 +8,16 @@ using namespace std;
using namespace cv;
extern "C" {
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial3_Sample3View_FindFeatures(JNIEnv* env, jobject, jint width, jint height, jbyteArray yuv, jintArray bgra)
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial3_Sample3Native_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba)
{
jbyte* _yuv = env->GetByteArrayElements(yuv, 0);
jint* _bgra = env->GetIntArrayElements(bgra, 0);
Mat myuv(height + height/2, width, CV_8UC1, (unsigned char *)_yuv);
Mat mbgra(height, width, CV_8UC4, (unsigned char *)_bgra);
Mat mgray(height, width, CV_8UC1, (unsigned char *)_yuv);
//Note the BGRA byte order:
//ARGB stored in Java as an int array becomes BGRA at the native level
cvtColor(myuv, mbgra, CV_YUV420sp2BGR, 4);
Mat* pMatGr=(Mat*)addrGray;
Mat* pMatRgb=(Mat*)addrRgba;
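// The Java side passes the Mats' native addresses (Mat.getNativeObjAddr()),
// so features are detected and drawn in place without copying pixels over JNI.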
vector<KeyPoint> v;
FastFeatureDetector detector(50);
detector.detect(mgray, v);
detector.detect(*pMatGr, v);
for( size_t i = 0; i < v.size(); i++ )
circle(mbgra, Point(v[i].pt.x, v[i].pt.y), 10, Scalar(0,0,255,255));
env->ReleaseIntArrayElements(bgra, _bgra, 0);
env->ReleaseByteArrayElements(yuv, _yuv, 0);
circle(*pMatRgb, Point(v[i].pt.x, v[i].pt.y), 10, Scalar(255,0,0,255));
}
}

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/tutorial4_activity_surface_view" />
</LinearLayout>

View File

@@ -3,21 +3,26 @@ package org.opencv.samples.tutorial3;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.view.WindowManager;
public class Sample3Native extends Activity {
public class Sample3Native extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
private Sample3View mView;
private Mat mRgba;
private Mat mGrayMat;
private JavaCameraView mOpenCvCameraView;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
@@ -28,37 +33,7 @@ public class Sample3Native extends Activity {
// Load native library after(!) OpenCV initialization
System.loadLibrary("native_sample");
// Create and set View
mView = new Sample3View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
mOpenCvCameraView.enableView();
} break;
default:
{
@@ -72,25 +47,6 @@ public class Sample3Native extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -98,5 +54,49 @@ public class Sample3Native extends Activity {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial3_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.tutorial4_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mGrayMat = new Mat(height, width, CvType.CV_8UC1);
}
public void onCameraViewStopped() {
mRgba.release();
mGrayMat.release();
}
public Mat onCameraFrame(Mat inputFrame) {
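// Convert to grayscale for the FAST detector, then let the native code draw
// the detected keypoints directly into mRgba via its native address.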
inputFrame.copyTo(mRgba);
Imgproc.cvtColor(mRgba, mGrayMat, Imgproc.COLOR_RGBA2GRAY);
FindFeatures(mGrayMat.getNativeObjAddr(), mRgba.getNativeObjAddr());
return mRgba;
}
public native void FindFeatures(long matAddrGr, long matAddrRgba);
}

View File

@@ -1,49 +0,0 @@
package org.opencv.samples.tutorial3;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
class Sample3View extends SampleViewBase {
private static final String TAG = "OCVSample::View";
private int mFrameSize;
private Bitmap mBitmap;
private int[] mRGBA;
public Sample3View(Context context) {
super(context);
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
mFrameSize = previewWidth * previewHeight;
mRGBA = new int[mFrameSize];
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
@Override
protected void onPreviewStopped() {
if(mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
mRGBA = null;
}
@Override
protected Bitmap processFrame(byte[] data) {
int[] rgba = mRGBA;
FindFeatures(getFrameWidth(), getFrameHeight(), data, rgba);
Bitmap bmp = mBitmap;
bmp.setPixels(rgba, 0/* offset */, getFrameWidth() /* stride */, 0, 0, getFrameWidth(), getFrameHeight());
return bmp;
}
public native void FindFeatures(int width, int height, byte yuv[], int[] rgba);
}

View File

@@ -1,229 +0,0 @@
package org.opencv.samples.tutorial3;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private volatile boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public int getFrameWidth() {
return mFrameWidth;
}
public int getFrameHeight() {
return mFrameHeight;
}
public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else
mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = null;
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
try {
mCamera = Camera.open(camIdx);
}
catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
}
}
if(mCamera == null) {
Log.e(TAG, "Can't open any camera");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "Releasing Camera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
mCamera = null;
}
}
onPreviewStopped();
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
/**
* This method is called when the preview process is being started. It is called before the first frame is delivered and before processFrame is called.
* It receives the width and height of the preview frames and can be used to prepare the data needed during frame processing.
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStarted(int previewWidth, int previewHeight);
/**
* This method is called when the preview is stopped. By the time it is called, the preview has stopped and all frame processing has completed.
* If the Bitmap object returned via processFrame is cached, this is a good time to recycle it.
* Any other resources used during the preview can be released.
*/
protected abstract void onPreviewStopped();
public void run() {
mThreadRun = true;
Log.i(TAG, "Started processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
if (!mThreadRun)
break;
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}
}
Log.i(TAG, "Finished processing thread");
}
}

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
<classpathentry kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="src" path="gen"/>
<classpathentry kind="output" path="bin/classes"/>
</classpath>

View File

@@ -8,7 +8,7 @@ using namespace std;
using namespace cv;
extern "C" {
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_Sample4View_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba)
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_Sample4Mixed_FindFeatures(JNIEnv*, jobject, jlong addrGray, jlong addrRgba)
{
Mat* pMatGr=(Mat*)addrGray;
Mat* pMatRgb=(Mat*)addrRgba;

View File

@@ -0,0 +1,11 @@
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<org.opencv.android.JavaCameraView
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:id="@+id/tutorial4_activity_surface_view" />
</LinearLayout>

View File

@@ -3,10 +3,14 @@ package org.opencv.samples.tutorial4;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.android.JavaCameraView;
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.os.Bundle;
import android.util.Log;
import android.view.Menu;
@@ -14,16 +18,27 @@ import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
public class Sample4Mixed extends Activity {
public class Sample4Mixed extends Activity implements CvCameraViewListener {
private static final String TAG = "OCVSample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewFeatures;
private Sample4View mView;
private static final int VIEW_MODE_RGBA = 0;
private static final int VIEW_MODE_GRAY = 1;
private static final int VIEW_MODE_CANNY = 2;
private static final int VIEW_MODE_FEATURES = 5;
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
private int mViewMode;
private Mat mRgba;
private Mat mIntermediateMat;
private Mat mGrayMat;
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private MenuItem mItemPreviewCanny;
private MenuItem mItemPreviewFeatures;
private JavaCameraView mOpenCvCameraView;
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
@@ -34,39 +49,7 @@ public class Sample4Mixed extends Activity {
// Load native library after(!) OpenCV initialization
System.loadLibrary("mixed_sample");
// Create and set View
mView = new Sample4View(mAppContext);
setContentView(mView);
// Check native OpenCV camera
if( !mView.openCamera() ) {
AlertDialog ad = new AlertDialog.Builder(mAppContext).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
} break;
/** OpenCV loader cannot start Google Play **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.d(TAG, "Google Play service is not accessible!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Google Play service is not accessible!\nTry to install the 'OpenCV Manager' and the appropriate 'OpenCV binary pack' APKs from OpenCV SDK manually via 'adb install' command.");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
mOpenCvCameraView.enableView();
} break;
default:
{
@@ -80,25 +63,6 @@ public class Sample4Mixed extends Activity {
Log.i(TAG, "Instantiated new " + this.getClass());
}
@Override
protected void onPause() {
Log.i(TAG, "called onPause");
if (null != mView)
mView.releaseCamera();
super.onPause();
}
@Override
protected void onResume() {
Log.i(TAG, "called onResume");
super.onResume();
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack)) {
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
@@ -106,6 +70,11 @@ public class Sample4Mixed extends Activity {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tutorial4_surface_view);
mOpenCvCameraView = (JavaCameraView)findViewById(R.id.tutorial4_activity_surface_view);
mOpenCvCameraView.setCvCameraViewListener(this);
}
@Override
@@ -119,17 +88,83 @@ public class Sample4Mixed extends Activity {
}
@Override
public void onPause()
{
mOpenCvCameraView.disableView();
super.onPause();
}
@Override
public void onResume()
{
super.onResume();
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, mLoaderCallback);
}
public void onDestroy() {
super.onDestroy();
mOpenCvCameraView.disableView();
}
public void onCameraViewStarted(int width, int height) {
mRgba = new Mat(height, width, CvType.CV_8UC4);
mIntermediateMat = new Mat(height, width, CvType.CV_8UC4);
mGrayMat = new Mat(height, width, CvType.CV_8UC1);
}
public void onCameraViewStopped() {
mRgba.release();
mGrayMat.release();
mIntermediateMat.release();
}
public Mat onCameraFrame(Mat inputFrame) {
final int viewMode = mViewMode;
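// The mode is read once into a local so that a menu selection arriving
// mid-frame cannot switch the pipeline halfway through processing.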
switch (viewMode) {
case VIEW_MODE_GRAY:
// input frame has gray scale format
Imgproc.cvtColor(inputFrame, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case VIEW_MODE_RGBA:
// input frame has RGBA format
inputFrame.copyTo(mRgba);
break;
case VIEW_MODE_CANNY:
// input frame has gray scale format
Imgproc.Canny(inputFrame, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case VIEW_MODE_FEATURES:
// input frame has RGBA format
inputFrame.copyTo(mRgba);
Imgproc.cvtColor(mRgba, mGrayMat, Imgproc.COLOR_RGBA2GRAY);
FindFeatures(mGrayMat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}
return mRgba;
}
public boolean onOptionsItemSelected(MenuItem item) {
Log.i(TAG, "called onOptionsItemSelected; selected item: " + item);
if (item == mItemPreviewRGBA) {
mView.setViewMode(Sample4View.VIEW_MODE_RGBA);
mOpenCvCameraView.SetCaptureFormat(Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
mViewMode = VIEW_MODE_RGBA;
} else if (item == mItemPreviewGray) {
mView.setViewMode(Sample4View.VIEW_MODE_GRAY);
mOpenCvCameraView.SetCaptureFormat(Highgui.CV_CAP_ANDROID_GREY_FRAME);
mViewMode = VIEW_MODE_GRAY;
} else if (item == mItemPreviewCanny) {
mView.setViewMode(Sample4View.VIEW_MODE_CANNY);
mOpenCvCameraView.SetCaptureFormat(Highgui.CV_CAP_ANDROID_GREY_FRAME);
mViewMode = VIEW_MODE_CANNY;
} else if (item == mItemPreviewFeatures) {
mView.setViewMode(Sample4View.VIEW_MODE_FEATURES);
mViewMode = VIEW_MODE_FEATURES;
mOpenCvCameraView.SetCaptureFormat(Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
}
return true;
}
public native void FindFeatures(long matAddrGr, long matAddrRgba);
}

View File

@@ -1,116 +0,0 @@
package org.opencv.samples.tutorial4;
import org.opencv.android.Utils;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
class Sample4View extends SampleViewBase {
private static final String TAG = "OCVSample::View";
public static final int VIEW_MODE_RGBA = 0;
public static final int VIEW_MODE_GRAY = 1;
public static final int VIEW_MODE_CANNY = 2;
public static final int VIEW_MODE_FEATURES = 5;
private Mat mYuv;
private Mat mRgba;
private Mat mGraySubmat;
private Mat mIntermediateMat;
private Bitmap mBitmap;
private int mViewMode;
public Sample4View(Context context) {
super(context);
}
@Override
protected void onPreviewStarted(int previewWidth, int previewHeight) {
Log.i(TAG, "called onPreviewStarted("+previewWidth+", "+previewHeight+")");
// initialize Mats before usage
mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
mRgba = new Mat();
mIntermediateMat = new Mat();
mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
}
@Override
protected void onPreviewStopped() {
Log.i(TAG, "called onPreviewStopped");
if (mBitmap != null) {
mBitmap.recycle();
mBitmap = null;
}
synchronized (this) {
// Explicitly deallocate Mats
if (mYuv != null)
mYuv.release();
if (mRgba != null)
mRgba.release();
if (mGraySubmat != null)
mGraySubmat.release();
if (mIntermediateMat != null)
mIntermediateMat.release();
mYuv = null;
mRgba = null;
mGraySubmat = null;
mIntermediateMat = null;
}
}
@Override
protected Bitmap processFrame(byte[] data) {
mYuv.put(0, 0, data);
final int viewMode = mViewMode;
switch (viewMode) {
case VIEW_MODE_GRAY:
Imgproc.cvtColor(mGraySubmat, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
break;
case VIEW_MODE_RGBA:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
break;
case VIEW_MODE_CANNY:
Imgproc.Canny(mGraySubmat, mIntermediateMat, 80, 100);
Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
break;
case VIEW_MODE_FEATURES:
Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGB, 4);
FindFeatures(mGraySubmat.getNativeObjAddr(), mRgba.getNativeObjAddr());
break;
}
Bitmap bmp = mBitmap;
try {
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e("org.opencv.samples.puzzle15", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}
return bmp;
}
public native void FindFeatures(long matAddrGr, long matAddrRgba);
public void setViewMode(int viewMode) {
Log.i(TAG, "called setViewMode("+viewMode+")");
mViewMode = viewMode;
}
}
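The deleted view above relies on the NV21 buffer layout: a w x h preview frame arrives as a (h + h/2) x w single-channel buffer whose first h rows are the luma plane, so a submat over those rows is a zero-copy grayscale view. A standalone sketch of the same trick (sizes are hypothetical):

    int w = 640, h = 480;                           // hypothetical preview size
    Mat yuv  = new Mat(h + h / 2, w, CvType.CV_8UC1);
    Mat gray = yuv.submat(0, h, 0, w);              // aliases the Y plane, no copy
    Mat rgba = new Mat();
    // After yuv.put(0, 0, data) with the camera bytes, gray is updated as well;
    // the full buffer converts to color in one call:
    Imgproc.cvtColor(yuv, rgba, Imgproc.COLOR_YUV420sp2RGBA, 4);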

View File

@@ -1,229 +0,0 @@
package org.opencv.samples.tutorial4;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
private static final String TAG = "OCVSample::BaseView";
private Camera mCamera;
private SurfaceHolder mHolder;
private int mFrameWidth;
private int mFrameHeight;
private byte[] mFrame;
private volatile boolean mThreadRun;
private byte[] mBuffer;
private SurfaceTexture mSf;
public SampleViewBase(Context context) {
super(context);
mHolder = getHolder();
mHolder.addCallback(this);
Log.i(TAG, "Instantiated new " + this.getClass());
}
public int getFrameWidth() {
return mFrameWidth;
}
public int getFrameHeight() {
return mFrameHeight;
}
public void setPreview() throws IOException {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSf = new SurfaceTexture(10);
mCamera.setPreviewTexture( mSf );
}
else
mCamera.setPreviewDisplay(null);
}
public boolean openCamera() {
Log.i(TAG, "Opening Camera");
mCamera = null;
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
try {
mCamera = Camera.open(camIdx);
}
catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
}
}
if(mCamera == null) {
Log.e(TAG, "Can't open any camera");
return false;
}
mCamera.setPreviewCallbackWithBuffer(new PreviewCallback() {
public void onPreviewFrame(byte[] data, Camera camera) {
synchronized (SampleViewBase.this) {
System.arraycopy(data, 0, mFrame, 0, data.length);
SampleViewBase.this.notify();
}
camera.addCallbackBuffer(mBuffer);
}
});
return true;
}
public void releaseCamera() {
Log.i(TAG, "Releasing Camera");
mThreadRun = false;
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.release();
mCamera = null;
}
}
onPreviewStopped();
}
public synchronized void setupCamera(int width, int height) {
if (mCamera != null) {
Log.i(TAG, "Setup Camera - " + width + "x" + height);
Camera.Parameters params = mCamera.getParameters();
List<Camera.Size> sizes = params.getSupportedPreviewSizes();
mFrameWidth = width;
mFrameHeight = height;
// selecting optimal camera preview size
{
int minDiff = Integer.MAX_VALUE;
for (Camera.Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = size.width;
mFrameHeight = size.height;
minDiff = Math.abs(size.height - height);
}
}
}
params.setPreviewSize(getFrameWidth(), getFrameHeight());
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
/* Now allocate the buffer */
params = mCamera.getParameters();
int size = params.getPreviewSize().width * params.getPreviewSize().height;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
/* The buffer where the current frame will be copied */
mFrame = new byte [size];
mCamera.addCallbackBuffer(mBuffer);
/* Notify that the preview is about to be started and deliver preview size */
onPreviewStarted(params.getPreviewSize().width, params.getPreviewSize().height);
try {
setPreview();
} catch (IOException e) {
Log.e(TAG, "mCamera.setPreviewDisplay/setPreviewTexture fails: " + e);
}
/* Now we can start a preview */
mCamera.startPreview();
}
}
public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
Log.i(TAG, "called surfaceChanged");
// stop preview before making changes
try {
mCamera.stopPreview();
} catch (Exception e){
// ignore: tried to stop a non-existent preview
}
// start preview with new settings
setupCamera(width, height);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "called surfaceCreated");
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
Log.i(TAG, "called surfaceDestroyed");
}
/* The bitmap returned by this method shall be owned by the child and released in onPreviewStopped() */
protected abstract Bitmap processFrame(byte[] data);
/**
* This method is called when the preview process is being started. It is called before the first frame is delivered to processFrame.
* It receives the width and height of the preview and can be used to prepare any data needed for frame processing.
* @param previewWidth - the width of the preview frames that will be delivered via processFrame
* @param previewHeight - the height of the preview frames that will be delivered via processFrame
*/
protected abstract void onPreviewStarted(int previewWidth, int previewHeight);
/**
* This method is called when the preview is stopped. By the time it is called, the preview has stopped and all frame processing has completed.
* If the Bitmap object returned by processFrame is cached, this is a good time to recycle it.
* Any other resources used during the preview can be released as well.
*/
protected abstract void onPreviewStopped();
public void run() {
mThreadRun = true;
Log.i(TAG, "Started processing thread");
while (mThreadRun) {
Bitmap bmp = null;
synchronized (this) {
try {
this.wait();
if (!mThreadRun)
break;
bmp = processFrame(mFrame);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - getFrameWidth()) / 2, (canvas.getHeight() - getFrameHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
}
}
Log.i(TAG, "Finished processing thread");
}
}
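For reference, a hypothetical minimal subclass (not part of this commit) illustrating the contract documented above: allocate in onPreviewStarted, convert each NV21 frame in processFrame, and release everything in onPreviewStopped. It assumes the same imports as SampleViewBase plus org.opencv.android.Utils, org.opencv.core.CvType, org.opencv.core.Mat, and org.opencv.imgproc.Imgproc:

    class GrayPreviewView extends SampleViewBase {
        private Mat mYuv;
        private Mat mRgba;
        private Bitmap mBitmap;

        public GrayPreviewView(Context context) {
            super(context);
        }

        @Override
        protected void onPreviewStarted(int previewWidth, int previewHeight) {
            // NV21 frame: h rows of luma followed by h/2 rows of interleaved chroma
            mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
            mRgba = new Mat();
            mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
        }

        @Override
        protected Bitmap processFrame(byte[] data) {
            mYuv.put(0, 0, data);
            Imgproc.cvtColor(mYuv, mRgba, Imgproc.COLOR_YUV420sp2RGBA, 4);
            Utils.matToBitmap(mRgba, mBitmap);
            return mBitmap;
        }

        @Override
        protected void onPreviewStopped() {
            if (mBitmap != null) { mBitmap.recycle(); mBitmap = null; }
            if (mYuv != null)    { mYuv.release();    mYuv = null; }
            if (mRgba != null)   { mRgba.release();   mRgba = null; }
        }
    }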