Windows RT sample improved.

Correct histogram output on YUV image implemented;
Some code refactoring done.
Alexander Smorkalov 2013-06-19 03:57:26 -07:00
parent bf22567c09
commit b0854c605a
22 changed files with 194 additions and 488 deletions
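For context, the histogram fix draws the bars directly on the NV12 output buffer's Y and UV planes instead of on a converted BGR frame. Below is a minimal standalone sketch of that technique (a hypothetical helper, not the sample's exact code; the function name, parameters, and the assumption that `colorY`/`colorUV` hold the luma and chroma components of the desired color are illustrative only):

```cpp
#include <algorithm>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Draw a line directly on an NV12 frame: full-resolution Y plane followed by
// a half-resolution interleaved U/V plane. Total unpadded size = width * height * 3 / 2.
void DrawLineOnNV12(unsigned char* pData, int width, int height, int stride,
                    cv::Point p1, cv::Point p2, int thickness,
                    cv::Scalar colorY, cv::Scalar colorUV)
{
    // Wrap the two planes without copying: Y starts at the buffer base,
    // the packed UV plane starts height rows (strides) later.
    cv::Mat planeY(height, width, CV_8UC1, pData, stride);
    cv::Mat planeUV(height / 2, width / 2, CV_8UC2, pData + height * stride, stride);

    cv::line(planeY, p1, p2, colorY, thickness);

    // The chroma plane is subsampled 2x in both directions, so halve the coordinates.
    cv::line(planeUV, cv::Point(p1.x / 2, p1.y / 2),
                      cv::Point(p2.x / 2, p2.y / 2),
             colorUV, std::max(1, thickness / 2));
}
```

This is what the `OutputY`/`OutputUV` Mats and the halved `mP1`/`mP2` coordinates accomplish for each histogram bar in the `OnProcessOutput` change below.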

View File

@ -313,7 +313,7 @@ void AdvancedCapture::AddEffectToImageStream()
changeTypeTask.get();
ShowStatusMessage("Change type on photo stream successful");
//Now add the effect on the image pin
task<void>(m_mediaCaptureMgr->AddEffectAsync(Windows::Media::Capture::MediaStreamType::Photo,"GrayscaleTransform.GrayscaleEffect", nullptr)).then([this](task<void> effectTask3)
task<void>(m_mediaCaptureMgr->AddEffectAsync(Windows::Media::Capture::MediaStreamType::Photo,"OcvTransform.OcvImageManipulations", nullptr)).then([this](task<void> effectTask3)
{
try
{
@ -348,7 +348,7 @@ void AdvancedCapture::AddEffectToImageStream()
else
{
//Add the effect to the image pin if the type is already "Video"
task<void>(mediaCapture->AddEffectAsync(Windows::Media::Capture::MediaStreamType::Photo,"GrayscaleTransform.GrayscaleEffect", nullptr)).then([this](task<void> effectTask3)
task<void>(mediaCapture->AddEffectAsync(Windows::Media::Capture::MediaStreamType::Photo,"OcvTransform.OcvImageManipulations", nullptr)).then([this](task<void> effectTask3)
{
try
{
@ -368,103 +368,6 @@ void AdvancedCapture::AddEffectToImageStream()
}
}
void AdvancedCapture::chkAddRemoveEffect_Checked(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
}
void AdvancedCapture::chkAddRemoveEffect_Unchecked(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
try
{
EffectTypeCombo->IsEnabled = false;
m_bEffectAdded = false;
create_task(m_mediaCaptureMgr->ClearEffectsAsync(Windows::Media::Capture::MediaStreamType::VideoPreview)).then([this](task<void> effectTask)
{
try
{
effectTask.get();
ShowStatusMessage("Remove effect from video preview stream successful");
if(m_bEffectAddedToRecord)
{
task<void>(m_mediaCaptureMgr->ClearEffectsAsync(Windows::Media::Capture::MediaStreamType::VideoRecord)).then([this](task<void> effectTask)
{
try
{
effectTask.get();
ShowStatusMessage("Remove effect from video record stream successful");
m_bEffectAddedToRecord = false;
if(m_bEffectAddedToPhoto)
{
task<void>(m_mediaCaptureMgr->ClearEffectsAsync(Windows::Media::Capture::MediaStreamType::Photo)).then([this](task<void> effectTask)
{
try
{
effectTask.get();
ShowStatusMessage("Remove effect from Photo stream successful");
m_bEffectAddedToPhoto = false;
}
catch(Exception ^e)
{
ShowExceptionMessage(e);
EffectTypeCombo->IsEnabled = true;
}
});
}
else
{
}
EffectTypeCombo->IsEnabled = true;
}
catch(Exception ^e)
{
ShowExceptionMessage(e);
EffectTypeCombo->IsEnabled = true;
}
});
}
else if(m_bEffectAddedToPhoto)
{
task<void>(m_mediaCaptureMgr->ClearEffectsAsync(Windows::Media::Capture::MediaStreamType::Photo)).then([this](task<void> effectTask)
{
try
{
effectTask.get();
ShowStatusMessage("Remove effect from Photo stream successful");
m_bEffectAddedToPhoto = false;
}
catch(Exception ^e)
{
ShowExceptionMessage(e);
EffectTypeCombo->IsEnabled = true;
}
});
}
else
{
EffectTypeCombo->IsEnabled = true;
}
}
catch (Exception ^e)
{
ShowExceptionMessage(e);
EffectTypeCombo->IsEnabled = true;
}
});
}
catch (Platform::Exception ^e)
{
ShowExceptionMessage(e);
EffectTypeCombo->IsEnabled = true;
}
}
void AdvancedCapture::ShowStatusMessage(Platform::String^ text)
{
rootPage->NotifyUser(text, NotifyType::StatusMessage);
@ -637,12 +540,6 @@ Windows::Media::Capture::VideoRotation AdvancedCapture::VideoRotationLookup(
}
}
void SDKSample::MediaCapture::AdvancedCapture::EffectTypeCombo_SelectionChanged(Platform::Object^ sender, Windows::UI::Xaml::Controls::SelectionChangedEventArgs^ e)
{
}
void SDKSample::MediaCapture::AdvancedCapture::Button_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e)
{
try
@ -653,7 +550,7 @@ void SDKSample::MediaCapture::AdvancedCapture::Button_Click(Platform::Object^ se
int index = EffectTypeCombo->SelectedIndex;
PropertySet^ props = ref new PropertySet();
props->Insert(L"{698649BE-8EAE-4551-A4CB-3EC98FBD3D86}", index);
create_task(m_mediaCaptureMgr->AddEffectAsync(Windows::Media::Capture::MediaStreamType::VideoPreview,"GrayscaleTransform.GrayscaleEffect", props)).then([this](task<void> effectTask)
create_task(m_mediaCaptureMgr->AddEffectAsync(Windows::Media::Capture::MediaStreamType::VideoPreview,"OcvTransform.OcvImageManipulations", props)).then([this](task<void> effectTask)
{
try
{
@ -670,7 +567,7 @@ void SDKSample::MediaCapture::AdvancedCapture::Button_Click(Platform::Object^ se
Windows::Media::MediaProperties::VideoEncodingProperties ^videoEncodingProperties = static_cast<Windows::Media::MediaProperties::VideoEncodingProperties ^>(props);
if(!videoEncodingProperties->Subtype->Equals("H264")) //Cant add an effect to an H264 stream
{
task<void>(mediaCapture->AddEffectAsync(Windows::Media::Capture::MediaStreamType::VideoRecord,"GrayscaleTransform.GrayscaleEffect", nullptr)).then([this](task<void> effectTask2)
task<void>(mediaCapture->AddEffectAsync(Windows::Media::Capture::MediaStreamType::VideoRecord,"OcvTransform.OcvImageManipulations", nullptr)).then([this](task<void> effectTask2)
{
try
{

View File

@ -58,8 +58,6 @@ namespace SDKSample
void lstEnumedDevices_SelectionChanged(Platform::Object^ sender, Windows::UI::Xaml::Controls::SelectionChangedEventArgs^ e);
void EnumerateWebcamsAsync();
void chkAddRemoveEffect_Checked(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
void chkAddRemoveEffect_Unchecked(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
void AddEffectToImageStream();
void ShowStatusMessage(Platform::String^ text);
@ -91,7 +89,6 @@ namespace SDKSample
bool m_bRotateVideoOnOrientationChange;
bool m_bReversePreviewRotation;
Windows::Foundation::EventRegistrationToken m_orientationChangedEventToken;
void EffectTypeCombo_SelectionChanged(Platform::Object^ sender, Windows::UI::Xaml::Controls::SelectionChangedEventArgs^ e);
void Button_Click(Platform::Object^ sender, Windows::UI::Xaml::RoutedEventArgs^ e);
};
}

View File

@ -3,7 +3,7 @@ Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 11 Express for Windows 8
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "MediaCapture", "MediaCapture.vcxproj", "{C5B886A7-8300-46FF-B533-9613DE2AF637}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "GrayscaleTransform", "MediaExtensions\Grayscale\Grayscale.vcxproj", "{BA69218F-DA5C-4D14-A78D-21A9E4DEC669}"
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "GrayscaleTransform", "MediaExtensions\OcvTransform\OcvTransform.vcxproj", "{BA69218F-DA5C-4D14-A78D-21A9E4DEC669}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution

View File

@ -156,17 +156,13 @@
</ClCompile>
</ItemGroup>
<ItemGroup>
<Image Include="Assets\microsoft-sdk.png" />
<Image Include="Assets\placeholder-sdk.png" />
<Image Include="Assets\smallTile-sdk.png" />
<Image Include="assets\opencv-logo-150.png" />
<Image Include="assets\opencv-logo-30.png" />
<Image Include="Assets\splash-sdk.png" />
<Image Include="Assets\squareTile-sdk.png" />
<Image Include="Assets\storeLogo-sdk.png" />
<Image Include="Assets\tile-sdk.png" />
<Image Include="Assets\windows-sdk.png" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="MediaExtensions\Grayscale\Grayscale.vcxproj">
<ProjectReference Include="MediaExtensions\OcvTransform\OcvTransform.vcxproj">
<Project>{ba69218f-da5c-4d14-a78d-21a9e4dec669}</Project>
</ProjectReference>
</ItemGroup>

View File

@ -1,101 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Image Include="Assets\microsoft-sdk.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\placeholder-sdk.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\smallTile-sdk.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\splash-sdk.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\squareTile-sdk.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\storeLogo-sdk.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\tile-sdk.png">
<Filter>Assets</Filter>
</Image>
<Image Include="Assets\windows-sdk.png">
<Filter>Assets</Filter>
</Image>
</ItemGroup>
<ItemGroup>
<ApplicationDefinition Include="App.xaml" />
</ItemGroup>
<ItemGroup>
<AppxManifest Include="Package.appxmanifest" />
</ItemGroup>
<ItemGroup>
<Page Include="MainPage.xaml" />
<Page Include="Common\StandardStyles.xaml">
<Filter>Common</Filter>
</Page>
<Page Include="Sample-Utils\SampleTemplateStyles.xaml">
<Filter>Sample-Utils</Filter>
</Page>
<Page Include="AdvancedCapture.xaml" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="MainPage.xaml.h" />
<ClInclude Include="pch.h" />
<ClInclude Include="App.xaml.h" />
<ClInclude Include="Common\SuspensionManager.h">
<Filter>Common</Filter>
</ClInclude>
<ClInclude Include="Common\LayoutAwarePage.h">
<Filter>Common</Filter>
</ClInclude>
<ClInclude Include="Constants.h" />
<ClInclude Include="AdvancedCapture.xaml.h" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="App.xaml.cpp" />
<ClCompile Include="MainPage.xaml.cpp" />
<ClCompile Include="pch.cpp" />
<ClCompile Include="Common\LayoutAwarePage.cpp">
<Filter>Common</Filter>
</ClCompile>
<ClCompile Include="Common\SuspensionManager.cpp">
<Filter>Common</Filter>
</ClCompile>
<ClCompile Include="Constants.cpp" />
<ClCompile Include="AdvancedCapture.xaml.cpp" />
</ItemGroup>
<ItemGroup>
<Filter Include="Assets">
<UniqueIdentifier>{132eec18-b164-4b15-a746-643880e9c5d9}</UniqueIdentifier>
</Filter>
<Filter Include="Common">
<UniqueIdentifier>{476b4177-f316-4458-8e13-cab3dc2381c5}</UniqueIdentifier>
</Filter>
<Filter Include="Sample-Utils">
<UniqueIdentifier>{54f287f8-e4cb-4f47-97d0-4c469de6992e}</UniqueIdentifier>
</Filter>
</ItemGroup>
<ItemGroup>
<None Include="..\..\..\..\build\install\bin\opencv_calib3d245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_contrib245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_core245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_features2d245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_flann245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_highgui245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_imgproc245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_legacy245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_ml245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_nonfree245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_objdetect245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_photo245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_stitching245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_superres245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_ts245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_video245.dll" />
<None Include="..\..\..\..\build\install\bin\opencv_videostab245.dll" />
</ItemGroup>
</Project>

View File

@ -1,22 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemGroup>
<Filter Include="Resources">
<UniqueIdentifier>bdc52ff6-58cb-464b-bf4f-0c1804b135ff</UniqueIdentifier>
<Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
</Filter>
</ItemGroup>
<ItemGroup>
<None Include="Grayscale.def" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="dllmain.cpp" />
<ClCompile Include="Grayscale.cpp" />
</ItemGroup>
<ItemGroup>
<ClInclude Include="Grayscale.h" />
</ItemGroup>
<ItemGroup>
<Midl Include="GrayscaleTransform.idl" />
</ItemGroup>
</Project>

View File

@ -0,0 +1,92 @@
/* this ALWAYS GENERATED file contains the definitions for the interfaces */
/* File created by MIDL compiler version 8.00.0595 */
/* at Wed Jun 19 03:47:25 2013
*/
/* Compiler settings for C:\Users\ASMORK~1\AppData\Local\Temp\OcvImageManipulations.idl-2e8e757e:
Oicf, W1, Zp8, env=Win32 (32b run), target_arch=X86 8.00.0595
protocol : dce , ms_ext, c_ext, robust
error checks: allocation ref bounds_check enum stub_data
VC __declspec() decoration level:
__declspec(uuid()), __declspec(selectany), __declspec(novtable)
DECLSPEC_UUID(), MIDL_INTERFACE()
*/
/* @@MIDL_FILE_HEADING( ) */
#pragma warning( disable: 4049 ) /* more than 64k source lines */
/* verify that the <rpcndr.h> version is high enough to compile this file*/
#ifndef __REQUIRED_RPCNDR_H_VERSION__
#define __REQUIRED_RPCNDR_H_VERSION__ 475
#endif
#include "rpc.h"
#include "rpcndr.h"
#ifndef __RPCNDR_H_VERSION__
#error this stub requires an updated version of <rpcndr.h>
#endif // __RPCNDR_H_VERSION__
#ifndef __OcvImageManipulations_h__
#define __OcvImageManipulations_h__
#if defined(_MSC_VER) && (_MSC_VER >= 1020)
#pragma once
#endif
#if defined(__cplusplus)
#if defined(__MIDL_USE_C_ENUM)
#define MIDL_ENUM enum
#else
#define MIDL_ENUM enum class
#endif
#endif
/* Forward Declarations */
/* header files for imported files */
#include "Windows.Media.h"
#ifdef __cplusplus
extern "C"{
#endif
/* interface __MIDL_itf_OcvImageManipulations_0000_0000 */
/* [local] */
#pragma warning(push)
#pragma warning(disable:4001)
#pragma once
#pragma warning(pop)
#ifndef RUNTIMECLASS_OcvTransform_OcvImageManipulations_DEFINED
#define RUNTIMECLASS_OcvTransform_OcvImageManipulations_DEFINED
extern const __declspec(selectany) WCHAR RuntimeClass_OcvTransform_OcvImageManipulations[] = L"OcvTransform.OcvImageManipulations";
#endif
/* interface __MIDL_itf_OcvImageManipulations_0000_0000 */
/* [local] */
extern RPC_IF_HANDLE __MIDL_itf_OcvImageManipulations_0000_0000_v0_0_c_ifspec;
extern RPC_IF_HANDLE __MIDL_itf_OcvImageManipulations_0000_0000_v0_0_s_ifspec;
/* Additional Prototypes for ALL interfaces */
/* end of Additional Prototypes */
#ifdef __cplusplus
}
#endif
#endif

View File

@ -2,10 +2,10 @@ import "Windows.Media.idl";
#include <sdkddkver.h>
namespace GrayscaleTransform
namespace OcvTransform
{
[version(NTDDI_WIN8)]
runtimeclass GrayscaleEffect
runtimeclass OcvImageManipulations
{
}
}

View File

@ -5,49 +5,23 @@
//
// Copyright (c) Microsoft Corporation. All rights reserved.
#include "Grayscale.h"
#include "OcvTransform.h"
#include "bufferlock.h"
#include "opencv2\core\core.hpp"
#include "opencv2\imgproc\imgproc.hpp"
#pragma comment(lib, "d2d1")
using namespace Microsoft::WRL;
/*
This sample implements a video effect as a Media Foundation transform (MFT).
The video effect manipulates chroma values in a YUV image. In the default setting,
the entire image is converted to grayscale. Optionally, the application may set any
of the following attributes:
MFT_GRAYSCALE_DESTINATION_RECT (type = blob, UINT32[4] array)
Sets the destination rectangle for the effect. Pixels outside the destination
rectangle are not altered.
MFT_GRAYSCALE_SATURATION (type = double)
Sets the saturation level. The nominal range is [0...1]. Values beyond 1.0f
result in supersaturated colors. Values below 0.0f create inverted colors.
MFT_GRAYSCALE_CHROMA_ROTATION (type = double)
Rotates the chroma values of each pixel. The attribue value is the angle of
rotation in degrees. The result is a shift in hue.
The effect is implemented by treating the chroma value of each pixel as a vector [u,v],
and applying a transformation matrix to the vector. The saturation parameter is applied
as a scaling transform.
NOTES ON THE MFT IMPLEMENTATION
1. The MFT has fixed streams: One input stream and one output stream.
2. The MFT supports the following formats: UYVY, YUY2, NV12.
2. The MFT supports NV12 format only.
3. If the MFT is holding an input sample, SetInputType and SetOutputType both fail.
@ -82,18 +56,13 @@ NOTES ON THE MFT IMPLEMENTATION
*/
// Video FOURCC codes.
const DWORD FOURCC_NV12 = '21VN';
// Static array of media types (preferred and accepted).
const GUID g_MediaSubtypes[] =
{
MFVideoFormat_NV12
};
HRESULT GetImageSize(DWORD fcc, UINT32 width, UINT32 height, DWORD* pcbImage);
HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride);
bool ValidateRect(const RECT& rc);
template <typename T>
inline T clamp(const T& val, const T& minVal, const T& maxVal)
@ -101,92 +70,7 @@ inline T clamp(const T& val, const T& minVal, const T& maxVal)
return (val < minVal ? minVal : (val > maxVal ? maxVal : val));
}
//-------------------------------------------------------------------
// Functions to convert a YUV images to grayscale.
//
// In all cases, the same transformation is applied to the 8-bit
// chroma values, but the pixel layout in memory differs.
//
// The image conversion functions take the following parameters:
//
// mat Transfomation matrix for chroma values.
// rcDest Destination rectangle.
// pDest Pointer to the destination buffer.
// lDestStride Stride of the destination buffer, in bytes.
// pSrc Pointer to the source buffer.
// lSrcStride Stride of the source buffer, in bytes.
// dwWidthInPixels Frame width in pixels.
// dwHeightInPixels Frame height, in pixels.
//-------------------------------------------------------------------
// Convert NV12 image
void TransformImage_NV12(
const D2D1::Matrix3x2F& mat,
const D2D_RECT_U& rcDest,
_Inout_updates_(_Inexpressible_(2 * lDestStride * dwHeightInPixels)) BYTE *pDest,
_In_ LONG lDestStride,
_In_reads_(_Inexpressible_(2 * lSrcStride * dwHeightInPixels)) const BYTE* pSrc,
_In_ LONG lSrcStride,
_In_ DWORD dwWidthInPixels,
_In_ DWORD dwHeightInPixels)
{
// NV12 is planar: Y plane, followed by packed U-V plane.
// Y plane
for (DWORD y = 0; y < dwHeightInPixels; y++)
{
CopyMemory(pDest, pSrc, dwWidthInPixels);
pDest += lDestStride;
pSrc += lSrcStride;
}
// U-V plane
// NOTE: The U-V plane has 1/2 the number of lines as the Y plane.
// Lines above the destination rectangle.
DWORD y = 0;
const DWORD y0 = rcDest.bottom < dwHeightInPixels ? rcDest.bottom : dwHeightInPixels;
for ( ; y < rcDest.top/2; y++)
{
memcpy(pDest, pSrc, dwWidthInPixels);
pSrc += lSrcStride;
pDest += lDestStride;
}
// Lines within the destination rectangle.
for ( ; y < y0/2; y++)
{
for (DWORD x = 0; (x + 1) < dwWidthInPixels; x += 2)
{
if (x >= rcDest.left && x < rcDest.right)
{
pDest[x] = 0;
pDest[x+1] = 0;
}
else
{
pDest[x] = pSrc[x];
pDest[x+1] = pSrc[x+1];
}
}
pDest += lDestStride;
pSrc += lSrcStride;
}
// Lines below the destination rectangle.
for ( ; y < dwHeightInPixels/2; y++)
{
memcpy(pDest, pSrc, dwWidthInPixels);
pSrc += lSrcStride;
pDest += lDestStride;
}
}
CGrayscale::CGrayscale() :
OcvImageManipulations::OcvImageManipulations() :
m_pSample(NULL), m_pInputType(NULL), m_pOutputType(NULL),
m_imageWidthInPixels(0), m_imageHeightInPixels(0), m_cbImageSize(0),
m_TransformType(Preview), m_bStreamingInitialized(false),
@ -195,7 +79,7 @@ CGrayscale::CGrayscale() :
InitializeCriticalSectionEx(&m_critSec, 3000, 0);
}
CGrayscale::~CGrayscale()
OcvImageManipulations::~OcvImageManipulations()
{
SafeRelease(&m_pInputType);
SafeRelease(&m_pOutputType);
@ -205,7 +89,7 @@ CGrayscale::~CGrayscale()
}
// Initialize the instance.
STDMETHODIMP CGrayscale::RuntimeClassInitialize()
STDMETHODIMP OcvImageManipulations::RuntimeClassInitialize()
{
// Create the attribute store.
return MFCreateAttributes(&m_pAttributes, 3);
@ -217,7 +101,7 @@ STDMETHODIMP CGrayscale::RuntimeClassInitialize()
// SetProperties
// Sets the configuration of the effect
//-------------------------------------------------------------------
HRESULT CGrayscale::SetProperties(ABI::Windows::Foundation::Collections::IPropertySet *pConfiguration)
HRESULT OcvImageManipulations::SetProperties(ABI::Windows::Foundation::Collections::IPropertySet *pConfiguration)
{
HRESULT hr = S_OK;
@ -237,14 +121,16 @@ HRESULT CGrayscale::SetProperties(ABI::Windows::Foundation::Collections::IProper
spSetting->Lookup(key, &value);
Microsoft::WRL::ComPtr<ABI::Windows::Foundation::IReference<int>> ref;
value->QueryInterface(IID_PPV_ARGS(&ref));
hr = value->QueryInterface(IID_PPV_ARGS(&ref));
int effect = InvalidEffect;
ref->get_Value(&effect);
hr = ref->get_Value(&effect);
if ((effect >= 0) && (effect < InvalidEffect))
{
m_TransformType = (ProcessingType)effect;
}
}
return hr;
}
// IMFTransform methods. Refer to the Media Foundation SDK documentation for details.
@ -254,7 +140,7 @@ HRESULT CGrayscale::SetProperties(ABI::Windows::Foundation::Collections::IProper
// Returns the minimum and maximum number of streams.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetStreamLimits(
HRESULT OcvImageManipulations::GetStreamLimits(
DWORD *pdwInputMinimum,
DWORD *pdwInputMaximum,
DWORD *pdwOutputMinimum,
@ -283,7 +169,7 @@ HRESULT CGrayscale::GetStreamLimits(
// Returns the actual number of streams.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetStreamCount(
HRESULT OcvImageManipulations::GetStreamCount(
DWORD *pcInputStreams,
DWORD *pcOutputStreams
)
@ -307,7 +193,7 @@ HRESULT CGrayscale::GetStreamCount(
// Returns stream IDs for the input and output streams.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetStreamIDs(
HRESULT OcvImageManipulations::GetStreamIDs(
DWORD dwInputIDArraySize,
DWORD *pdwInputIDs,
DWORD dwOutputIDArraySize,
@ -328,7 +214,7 @@ HRESULT CGrayscale::GetStreamIDs(
// Returns information about an input stream.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetInputStreamInfo(
HRESULT OcvImageManipulations::GetInputStreamInfo(
DWORD dwInputStreamID,
MFT_INPUT_STREAM_INFO * pStreamInfo
)
@ -375,7 +261,7 @@ HRESULT CGrayscale::GetInputStreamInfo(
// Returns information about an output stream.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetOutputStreamInfo(
HRESULT OcvImageManipulations::GetOutputStreamInfo(
DWORD dwOutputStreamID,
MFT_OUTPUT_STREAM_INFO * pStreamInfo
)
@ -424,7 +310,7 @@ HRESULT CGrayscale::GetOutputStreamInfo(
// Returns the attributes for the MFT.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetAttributes(IMFAttributes** ppAttributes)
HRESULT OcvImageManipulations::GetAttributes(IMFAttributes** ppAttributes)
{
if (ppAttributes == NULL)
{
@ -446,7 +332,7 @@ HRESULT CGrayscale::GetAttributes(IMFAttributes** ppAttributes)
// Returns stream-level attributes for an input stream.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetInputStreamAttributes(
HRESULT OcvImageManipulations::GetInputStreamAttributes(
DWORD dwInputStreamID,
IMFAttributes **ppAttributes
)
@ -461,7 +347,7 @@ HRESULT CGrayscale::GetInputStreamAttributes(
// Returns stream-level attributes for an output stream.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetOutputStreamAttributes(
HRESULT OcvImageManipulations::GetOutputStreamAttributes(
DWORD dwOutputStreamID,
IMFAttributes **ppAttributes
)
@ -475,7 +361,7 @@ HRESULT CGrayscale::GetOutputStreamAttributes(
// DeleteInputStream
//-------------------------------------------------------------------
HRESULT CGrayscale::DeleteInputStream(DWORD dwStreamID)
HRESULT OcvImageManipulations::DeleteInputStream(DWORD dwStreamID)
{
// This MFT has a fixed number of input streams, so the method is not supported.
return E_NOTIMPL;
@ -486,7 +372,7 @@ HRESULT CGrayscale::DeleteInputStream(DWORD dwStreamID)
// AddInputStreams
//-------------------------------------------------------------------
HRESULT CGrayscale::AddInputStreams(
HRESULT OcvImageManipulations::AddInputStreams(
DWORD cStreams,
DWORD *adwStreamIDs
)
@ -501,7 +387,7 @@ HRESULT CGrayscale::AddInputStreams(
// Returns a preferred input type.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetInputAvailableType(
HRESULT OcvImageManipulations::GetInputAvailableType(
DWORD dwInputStreamID,
DWORD dwTypeIndex, // 0-based
IMFMediaType **ppType
@ -549,7 +435,7 @@ HRESULT CGrayscale::GetInputAvailableType(
// Returns a preferred output type.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetOutputAvailableType(
HRESULT OcvImageManipulations::GetOutputAvailableType(
DWORD dwOutputStreamID,
DWORD dwTypeIndex, // 0-based
IMFMediaType **ppType
@ -594,7 +480,7 @@ HRESULT CGrayscale::GetOutputAvailableType(
// SetInputType
//-------------------------------------------------------------------
HRESULT CGrayscale::SetInputType(
HRESULT OcvImageManipulations::SetInputType(
DWORD dwInputStreamID,
IMFMediaType *pType, // Can be NULL to clear the input type.
DWORD dwFlags
@ -656,7 +542,7 @@ done:
// SetOutputType
//-------------------------------------------------------------------
HRESULT CGrayscale::SetOutputType(
HRESULT OcvImageManipulations::SetOutputType(
DWORD dwOutputStreamID,
IMFMediaType *pType, // Can be NULL to clear the output type.
DWORD dwFlags
@ -718,7 +604,7 @@ done:
// Returns the current input type.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetInputCurrentType(
HRESULT OcvImageManipulations::GetInputCurrentType(
DWORD dwInputStreamID,
IMFMediaType **ppType
)
@ -755,7 +641,7 @@ HRESULT CGrayscale::GetInputCurrentType(
// Returns the current output type.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetOutputCurrentType(
HRESULT OcvImageManipulations::GetOutputCurrentType(
DWORD dwOutputStreamID,
IMFMediaType **ppType
)
@ -793,7 +679,7 @@ HRESULT CGrayscale::GetOutputCurrentType(
// Query if the MFT is accepting more input.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetInputStatus(
HRESULT OcvImageManipulations::GetInputStatus(
DWORD dwInputStreamID,
DWORD *pdwFlags
)
@ -840,7 +726,7 @@ HRESULT CGrayscale::GetInputStatus(
// Query if the MFT can produce output.
//-------------------------------------------------------------------
HRESULT CGrayscale::GetOutputStatus(DWORD *pdwFlags)
HRESULT OcvImageManipulations::GetOutputStatus(DWORD *pdwFlags)
{
if (pdwFlags == NULL)
{
@ -869,7 +755,7 @@ HRESULT CGrayscale::GetOutputStatus(DWORD *pdwFlags)
// Sets the range of time stamps that the MFT will output.
//-------------------------------------------------------------------
HRESULT CGrayscale::SetOutputBounds(
HRESULT OcvImageManipulations::SetOutputBounds(
LONGLONG hnsLowerBound,
LONGLONG hnsUpperBound
)
@ -884,7 +770,7 @@ HRESULT CGrayscale::SetOutputBounds(
// Sends an event to an input stream.
//-------------------------------------------------------------------
HRESULT CGrayscale::ProcessEvent(
HRESULT OcvImageManipulations::ProcessEvent(
DWORD dwInputStreamID,
IMFMediaEvent *pEvent
)
@ -900,7 +786,7 @@ HRESULT CGrayscale::ProcessEvent(
// ProcessMessage
//-------------------------------------------------------------------
HRESULT CGrayscale::ProcessMessage(
HRESULT OcvImageManipulations::ProcessMessage(
MFT_MESSAGE_TYPE eMessage,
ULONG_PTR ulParam
)
@ -965,7 +851,7 @@ HRESULT CGrayscale::ProcessMessage(
// Process an input sample.
//-------------------------------------------------------------------
HRESULT CGrayscale::ProcessInput(
HRESULT OcvImageManipulations::ProcessInput(
DWORD dwInputStreamID,
IMFSample *pSample,
DWORD dwFlags
@ -1030,7 +916,7 @@ done:
// Process an output sample.
//-------------------------------------------------------------------
HRESULT CGrayscale::ProcessOutput(
HRESULT OcvImageManipulations::ProcessOutput(
DWORD dwFlags,
DWORD cOutputBufferCount,
MFT_OUTPUT_DATA_BUFFER *pOutputSamples, // one per stream
@ -1150,7 +1036,7 @@ done:
// dwTypeIndex: Index into the list of peferred media types.
// ppmt: Receives a pointer to the media type.
HRESULT CGrayscale::OnGetPartialType(DWORD dwTypeIndex, IMFMediaType **ppmt)
HRESULT OcvImageManipulations::OnGetPartialType(DWORD dwTypeIndex, IMFMediaType **ppmt)
{
if (dwTypeIndex >= ARRAYSIZE(g_MediaSubtypes))
{
@ -1188,7 +1074,7 @@ done:
// Validate an input media type.
HRESULT CGrayscale::OnCheckInputType(IMFMediaType *pmt)
HRESULT OcvImageManipulations::OnCheckInputType(IMFMediaType *pmt)
{
assert(pmt != NULL);
@ -1217,7 +1103,7 @@ HRESULT CGrayscale::OnCheckInputType(IMFMediaType *pmt)
// Validate an output media type.
HRESULT CGrayscale::OnCheckOutputType(IMFMediaType *pmt)
HRESULT OcvImageManipulations::OnCheckOutputType(IMFMediaType *pmt)
{
assert(pmt != NULL);
@ -1247,7 +1133,7 @@ HRESULT CGrayscale::OnCheckOutputType(IMFMediaType *pmt)
// Validate a media type (input or output)
HRESULT CGrayscale::OnCheckMediaType(IMFMediaType *pmt)
HRESULT OcvImageManipulations::OnCheckMediaType(IMFMediaType *pmt)
{
BOOL bFoundMatchingSubtype = FALSE;
@ -1307,7 +1193,7 @@ done:
//
// Prerequisite: The input type was already validated.
void CGrayscale::OnSetInputType(IMFMediaType *pmt)
void OcvImageManipulations::OnSetInputType(IMFMediaType *pmt)
{
// if pmt is NULL, clear the type.
// if pmt is non-NULL, set the type.
@ -1328,7 +1214,7 @@ void CGrayscale::OnSetInputType(IMFMediaType *pmt)
//
// Prerequisite: The output type was already validated.
void CGrayscale::OnSetOutputType(IMFMediaType *pmt)
void OcvImageManipulations::OnSetOutputType(IMFMediaType *pmt)
{
// If pmt is NULL, clear the type. Otherwise, set the type.
@ -1346,7 +1232,7 @@ void CGrayscale::OnSetOutputType(IMFMediaType *pmt)
// This method is called if the client sends the MFT_MESSAGE_NOTIFY_BEGIN_STREAMING
// message, or when the client processes a sample, whichever happens first.
HRESULT CGrayscale::BeginStreaming()
HRESULT OcvImageManipulations::BeginStreaming()
{
HRESULT hr = S_OK;
@ -1366,7 +1252,7 @@ HRESULT CGrayscale::BeginStreaming()
// message, or when the media type changes. In general, it should be called whenever
// the streaming parameters need to be reset.
HRESULT CGrayscale::EndStreaming()
HRESULT OcvImageManipulations::EndStreaming()
{
m_bStreamingInitialized = false;
return S_OK;
@ -1376,7 +1262,7 @@ HRESULT CGrayscale::EndStreaming()
// Generate output data.
HRESULT CGrayscale::OnProcessOutput(IMFMediaBuffer *pIn, IMFMediaBuffer *pOut)
HRESULT OcvImageManipulations::OnProcessOutput(IMFMediaBuffer *pIn, IMFMediaBuffer *pOut)
{
BYTE *pDest = NULL; // Destination buffer.
LONG lDestStride = 0; // Destination stride.
@ -1447,10 +1333,17 @@ HRESULT CGrayscale::OnProcessOutput(IMFMediaBuffer *pIn, IMFMediaBuffer *pOut)
const int mHistSize[] = {25};
const float baseRabge[] = {0.f,256.f};
const float* ranges[] = {baseRabge};
const cv::Scalar mColorsRGB[] = { cv::Scalar(200, 0, 0, 255), cv::Scalar(0, 200, 0, 255),
cv::Scalar(0, 0, 200, 255) };
const cv::Scalar mColorsY[] = { cv::Scalar(76), cv::Scalar(149), cv::Scalar(29) };
const cv::Scalar mColorsUV[] = { cv::Scalar(84, 255), cv::Scalar(43, 21), cv::Scalar(255, 107) };
cv::Mat OutputY(m_imageHeightInPixels, m_imageWidthInPixels, CV_8UC1, pDest, lDestStride);
cv::Mat OutputUV(m_imageHeightInPixels/2, m_imageWidthInPixels/2,
CV_8UC2, pDest+m_imageHeightInPixels*lDestStride, lDestStride);
cv::Mat BgrFrame;
InputFrame.copyTo(OutputFrame);
cv::cvtColor(InputFrame, BgrFrame, cv::COLOR_YUV420sp2BGR);
int thikness = (int) (BgrFrame.cols / (mHistSizeNum + 10) / 5);
if(thikness > 5) thikness = 5;
@ -1464,14 +1357,20 @@ HRESULT CGrayscale::OnProcessOutput(IMFMediaBuffer *pIn, IMFMediaBuffer *pOut)
cv::normalize(hist, hist, BgrFrame.rows/2, 0, cv::NORM_INF);
for(int h=0; h<mHistSizeNum; h++) {
cv::Point mP1, mP2;
mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
// Draw on Y plane
mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
mP1.y = BgrFrame.rows-1;
mP2.y = mP1.y - 2 - hist.at<float>(h);
cv::line(BgrFrame, mP1, mP2, mColorsRGB[c], thikness);
}
}
mP2.y = mP1.y - 2 - (int)hist.at<float>(h);
cv::line(OutputY, mP1, mP2, mColorsY[c], thikness);
cv::cvtColor(BgrFrame, OutputFrame, cv::COLOR_BGR2YUV_I420);
// Draw on UV planes
mP1.x /= 2;
mP1.y /= 2;
mP2.x /= 2;
mP2.y /= 2;
cv::line(OutputUV, mP1, mP2, mColorsUV[c], thikness/2);
}
}
} break;
default:
break;
@ -1486,7 +1385,7 @@ HRESULT CGrayscale::OnProcessOutput(IMFMediaBuffer *pIn, IMFMediaBuffer *pOut)
// Flush the MFT.
HRESULT CGrayscale::OnFlush()
HRESULT OcvImageManipulations::OnFlush()
{
// For this MFT, flushing just means releasing the input sample.
SafeRelease(&m_pSample);
@ -1497,7 +1396,7 @@ HRESULT CGrayscale::OnFlush()
// Update the format information. This method is called whenever the
// input type is set.
HRESULT CGrayscale::UpdateFormatInfo()
HRESULT OcvImageManipulations::UpdateFormatInfo()
{
HRESULT hr = S_OK;
@ -1526,8 +1425,8 @@ HRESULT CGrayscale::UpdateFormatInfo()
goto done;
}
// Calculate the image size (not including padding)
hr = GetImageSize(subtype.Data1, m_imageWidthInPixels, m_imageHeightInPixels, &m_cbImageSize);
// Calculate the image size for YUV NV12 image(not including padding)
m_cbImageSize = (m_imageHeightInPixels + m_imageHeightInPixels/2)*m_imageWidthInPixels;
}
done:
@ -1535,35 +1434,6 @@ done:
}
// Calculate the size of the buffer needed to store the image.
// fcc: The FOURCC code of the video format.
HRESULT GetImageSize(DWORD fcc, UINT32 width, UINT32 height, DWORD* pcbImage)
{
HRESULT hr = S_OK;
switch (fcc)
{
case FOURCC_NV12:
// check overflow
if ((height/2 > MAXDWORD - height) || ((height + height/2) > MAXDWORD / width))
{
hr = E_INVALIDARG;
}
else
{
// 12 bpp
*pcbImage = width * (height + (height/2));
}
break;
default:
hr = E_FAIL; // Unsupported type.
}
return hr;
}
// Get the default stride for a video format.
HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride)
{
@ -1614,23 +1484,3 @@ HRESULT GetDefaultStride(IMFMediaType *pType, LONG *plStride)
return hr;
}
// Validate that a rectangle meets the following criteria:
//
// - All coordinates are non-negative.
// - The rectangle is not flipped (top > bottom, left > right)
//
// These are the requirements for the destination rectangle.
bool ValidateRect(const RECT& rc)
{
if (rc.left < 0 || rc.top < 0)
{
return false;
}
if (rc.left > rc.right || rc.top > rc.bottom)
{
return false;
}
return true;
}
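The `mColorsY`/`mColorsUV` constants introduced in the `OnProcessOutput` hunk above appear to be the BT.601 full-range YUV equivalents of pure red, green, and blue; the commit does not state the derivation, so the exact coefficients and rounding below are an assumption. A quick standalone check:

```cpp
#include <cmath>
#include <cstdio>

// Clamp to the valid 8-bit range before rounding.
static double Clamp255(double x) { return x < 0.0 ? 0.0 : (x > 255.0 ? 255.0 : x); }

// BT.601 full-range RGB -> YUV (one common convention; the sample's exact
// constants differ by at most one rounding step).
static void RgbToYuv601(double r, double g, double b)
{
    double y =  0.299 * r + 0.587 * g + 0.114 * b;
    double u = -0.169 * r - 0.331 * g + 0.500 * b + 128.0;
    double v =  0.500 * r - 0.419 * g - 0.081 * b + 128.0;
    std::printf("Y=%3.0f U=%3.0f V=%3.0f\n",
                std::round(Clamp255(y)), std::round(Clamp255(u)), std::round(Clamp255(v)));
}

int main()
{
    RgbToYuv601(255, 0, 0); // prints Y= 76 U= 85 V=255; sample uses mColorsY[0]=76, mColorsUV[0]=(84,255)
    RgbToYuv601(0, 255, 0); // prints Y=150 U= 44 V= 21; sample uses mColorsY[1]=149, mColorsUV[1]=(43,21)
    RgbToYuv601(0, 0, 255); // prints Y= 29 U=255 V=107; sample uses mColorsY[2]=29, mColorsUV[2]=(255,107)
    return 0;
}
```

This explains why the histogram bars keep their red/green/blue identity even though they are drawn straight onto the Y and UV planes rather than onto a BGR frame.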

View File

@ -18,14 +18,11 @@
#include <strsafe.h>
#include <assert.h>
// Note: The Direct2D helper library is included for its 2D matrix operations.
#include <D2d1helper.h>
#include <wrl\implements.h>
#include <wrl\module.h>
#include <windows.media.h>
#include "GrayscaleTransform.h"
#include "OcvImageManipulations.h"
// CLSID of the MFT.
DEFINE_GUID(CLSID_GrayscaleMFT,
@ -38,7 +35,7 @@ DEFINE_GUID(CLSID_GrayscaleMFT,
// Configuration attributes
// {698649BE-8EAE-4551-A4CB-3EC98FBD3D86}
DEFINE_GUID(MFT_IMAGE_EFFECT,
DEFINE_GUID(OCV_IMAGE_EFFECT,
0x698649be, 0x8eae, 0x4551, 0xa4, 0xcb, 0x3e, 0xc9, 0x8f, 0xbd, 0x3d, 0x86);
@ -61,21 +58,21 @@ template <class T> void SafeRelease(T **ppT)
}
}
// CGrayscale class:
// OcvImageManipulations class:
// Implements a grayscale video effect.
class CGrayscale
class OcvImageManipulations
: public Microsoft::WRL::RuntimeClass<
Microsoft::WRL::RuntimeClassFlags< Microsoft::WRL::RuntimeClassType::WinRtClassicComMix >,
ABI::Windows::Media::IMediaExtension,
IMFTransform >
{
InspectableClass(RuntimeClass_GrayscaleTransform_GrayscaleEffect, BaseTrust)
InspectableClass(RuntimeClass_OcvTransform_OcvImageManipulations, BaseTrust)
public:
CGrayscale();
OcvImageManipulations();
~CGrayscale();
~OcvImageManipulations();
STDMETHOD(RuntimeClassInitialize)();

View File

@ -30,8 +30,8 @@
<VCTargetsPath Condition="'$(VCTargetsPath11)' != '' and '$(VSVersion)' == '' and '$(VisualStudioVersion)' == ''">$(VCTargetsPath11)</VCTargetsPath>
<ProjectGuid>{BA69218F-DA5C-4D14-A78D-21A9E4DEC669}</ProjectGuid>
<Keyword>Win32Proj</Keyword>
<RootNamespace>GrayscaleTransform</RootNamespace>
<ProjectName>GrayscaleTransform</ProjectName>
<RootNamespace>OcvTransform</RootNamespace>
<ProjectName>OcvTransform</ProjectName>
<MinimumVisualStudioVersion>11.0</MinimumVisualStudioVersion>
<AppContainerApplication>true</AppContainerApplication>
</PropertyGroup>
@ -129,7 +129,7 @@
<SubSystem>Console</SubSystem>
<AdditionalDependencies>runtimeobject.lib;%(AdditionalDependencies);mf.lib;mfuuid.lib;mfplat.lib;opencv_core245.lib;opencv_imgproc245.lib</AdditionalDependencies>
<IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
<ModuleDefinitionFile>Grayscale.def</ModuleDefinitionFile>
<ModuleDefinitionFile>OcvTransform.def</ModuleDefinitionFile>
<AdditionalLibraryDirectories>$(OPENCV_DIR)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
<CustomBuildStep>
@ -153,7 +153,7 @@
<SubSystem>Console</SubSystem>
<AdditionalDependencies>runtimeobject.lib;%(AdditionalDependencies);mf.lib;mfuuid.lib;mfplat.lib</AdditionalDependencies>
<IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
<ModuleDefinitionFile>Grayscale.def</ModuleDefinitionFile>
<ModuleDefinitionFile>OcvTransform.def</ModuleDefinitionFile>
<AdditionalLibraryDirectories>$(OPENCV_DIR)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
<CustomBuildStep>
@ -177,7 +177,7 @@
<SubSystem>Console</SubSystem>
<AdditionalDependencies>runtimeobject.lib;%(AdditionalDependencies);mf.lib;mfuuid.lib;mfplat.lib</AdditionalDependencies>
<IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
<ModuleDefinitionFile>Grayscale.def</ModuleDefinitionFile>
<ModuleDefinitionFile>OcvTransform.def</ModuleDefinitionFile>
<AdditionalLibraryDirectories>$(OPENCV_DIR)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
<CustomBuildStep>
@ -201,7 +201,7 @@
<SubSystem>Console</SubSystem>
<AdditionalDependencies>runtimeobject.lib;%(AdditionalDependencies);mf.lib;mfuuid.lib;mfplat.lib</AdditionalDependencies>
<IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
<ModuleDefinitionFile>Grayscale.def</ModuleDefinitionFile>
<ModuleDefinitionFile>OcvTransform.def</ModuleDefinitionFile>
<AdditionalLibraryDirectories>$(OPENCV_DIR)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
<CustomBuildStep>
@ -225,7 +225,7 @@
<SubSystem>Console</SubSystem>
<AdditionalDependencies>runtimeobject.lib;%(AdditionalDependencies);mf.lib;mfuuid.lib;mfplat.lib</AdditionalDependencies>
<IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
<ModuleDefinitionFile>Grayscale.def</ModuleDefinitionFile>
<ModuleDefinitionFile>OcvTransform.def</ModuleDefinitionFile>
<AdditionalLibraryDirectories>$(OPENCV_DIR)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
<CustomBuildStep>
@ -249,7 +249,7 @@
<SubSystem>Console</SubSystem>
<AdditionalDependencies>runtimeobject.lib;%(AdditionalDependencies);mf.lib;mfuuid.lib;mfplat.lib</AdditionalDependencies>
<IgnoreAllDefaultLibraries>false</IgnoreAllDefaultLibraries>
<ModuleDefinitionFile>Grayscale.def</ModuleDefinitionFile>
<ModuleDefinitionFile>OcvTransform.def</ModuleDefinitionFile>
<AdditionalLibraryDirectories>$(OPENCV_DIR)\lib;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
</Link>
<CustomBuildStep>
@ -258,17 +258,17 @@
</CustomBuildStep>
</ItemDefinitionGroup>
<ItemGroup>
<ClInclude Include="Grayscale.h" />
<ClInclude Include="OcvTransform.h" />
</ItemGroup>
<ItemGroup>
<ClCompile Include="dllmain.cpp" />
<ClCompile Include="Grayscale.cpp" />
<ClCompile Include="OcvTransform.cpp" />
</ItemGroup>
<ItemGroup>
<None Include="Grayscale.def" />
<None Include="OcvTransform.def" />
</ItemGroup>
<ItemGroup>
<Midl Include="GrayscaleTransform.idl">
<Midl Include="OcvImageManipulations.idl">
<MetadataFileName Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
</MetadataFileName>
<MetadataFileName Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">

View File

@ -12,12 +12,12 @@
//////////////////////////////////////////////////////////////////////////
#include <initguid.h>
#include "Grayscale.h"
#include "OcvTransform.h"
using namespace Microsoft::WRL;
namespace Microsoft { namespace Samples {
ActivatableClass(CGrayscale);
ActivatableClass(OcvImageManipulations);
}}
BOOL WINAPI DllMain( _In_ HINSTANCE hInstance, _In_ DWORD dwReason, _In_opt_ LPVOID lpReserved )

View File

@ -15,8 +15,8 @@
</Resources>
<Applications>
<Application Id="MediaCapture.App" Executable="$targetnametoken$.exe" EntryPoint="MediaCapture.App">
<VisualElements DisplayName="MediaCapture CPP sample" Logo="Assets\squareTile-sdk.png" SmallLogo="Assets\smallTile-sdk.png" Description="MediaCapture CPP sample" ForegroundText="light" BackgroundColor="#00b2f0">
<DefaultTile ShortName="MC CPP" ShowName="allLogos" />
<VisualElements DisplayName="OCV Image Manipulations" Logo="assets\opencv-logo-150.png" SmallLogo="assets\opencv-logo-30.png" Description="OpenCV Image Manipulations sample" ForegroundText="light" BackgroundColor="#00b2f0">
<DefaultTile ShortName="Ocv ImageManipulations" ShowName="allLogos" />
<SplashScreen Image="Assets\splash-sdk.png" BackgroundColor="#00b2f0" />
</VisualElements>
</Application>
@ -28,8 +28,8 @@
<Extensions>
<Extension Category="windows.activatableClass.inProcessServer">
<InProcessServer>
<Path>GrayscaleTransform.dll</Path>
<ActivatableClass ActivatableClassId="GrayscaleTransform.GrayscaleEffect" ThreadingModel="both" />
<Path>OcvTransform.dll</Path>
<ActivatableClass ActivatableClassId="OcvTransform.OcvImageManipulations" ThreadingModel="both" />
</InProcessServer>
</Extension>
</Extensions>

Binary image files changed (contents not shown): before 1.5 KiB; after 4.5 KiB; after 1.1 KiB; before 8.8 KiB; before 1.2 KiB; before 2.4 KiB; before 1.5 KiB; before 2.6 KiB.