#include "pch.h"
#include "Direct3DInterop.h"
#include "Direct3DContentProvider.h"
#include <windows.storage.streams.h>
#include <wrl.h>
#include <robuffer.h>
#include <opencv2\core\core.hpp>
#include <opencv2\imgproc\imgproc.hpp>
#include <opencv2\features2d\features2d.hpp>
#include <algorithm>

using namespace Windows::Storage::Streams;
using namespace Microsoft::WRL;
using namespace Windows::Foundation;
using namespace Windows::Foundation::Collections;
using namespace Windows::UI::Core;
using namespace Windows::Phone::Graphics::Interop;
using namespace Windows::Phone::Input::Interop;
using namespace Windows::Phone::Media::Capture;

#if !defined(_M_ARM)
#pragma message("warning: Direct3DInterop.cpp: Windows Phone camera code does not run in the emulator.")
#pragma message("warning: Direct3DInterop.cpp: Please compile as an ARM build and run on a device.")
#endif

namespace PhoneXamlDirect3DApp1Comp
{
    // Called each time a preview frame is available
    void CameraCapturePreviewSink::OnFrameAvailable(
        DXGI_FORMAT format,
        UINT width,
        UINT height,
        BYTE* pixels
        )
    {
        m_Direct3dInterop->UpdateFrame(pixels, width, height);
    }

    // Called each time a captured frame is available
    void CameraCaptureSampleSink::OnSampleAvailable(
        ULONGLONG hnsPresentationTime,
        ULONGLONG hnsSampleDuration,
        DWORD cbSample,
        BYTE* pSample)
    {
        // Captured samples are not processed in this sample; only the preview
        // sink (OnFrameAvailable above) feeds frames to OpenCV.
    }

    Direct3DInterop::Direct3DInterop()
        : m_algorithm(OCVFilterType::ePreview)
        , m_contentDirty(false)
        , m_backFrame(nullptr)
        , m_frontFrame(nullptr)
    {
    }

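    // Swaps the back and front frame buffers under the lock so the rendering
    // side always reads the most recent camera frame; returns false until the
    // first frame has been received.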
    bool Direct3DInterop::SwapFrames()
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        if (m_backFrame != nullptr)
        {
            std::swap(m_backFrame, m_frontFrame);
            return true;
        }
        return false;
    }

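    // Called from the preview sink with a BGRA frame: allocates the two frame
    // buffers lazily on the first call, copies the pixels into the back
    // buffer, and requests an additional frame from the host so the new
    // content gets processed and drawn.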
    void Direct3DInterop::UpdateFrame(byte* buffer, int width, int height)
    {
        std::lock_guard<std::mutex> lock(m_mutex);
        if (m_backFrame == nullptr)
        {
            m_backFrame = std::make_shared<cv::Mat>(height, width, CV_8UC4);
            m_frontFrame = std::make_shared<cv::Mat>(height, width, CV_8UC4);
        }

        memcpy(m_backFrame->data, buffer, 4 * height * width);
        m_contentDirty = true;
        RequestAdditionalFrame();
    }

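    // Swaps in the latest frame, applies the currently selected OpenCV filter
    // to it, and uploads the result to the renderer as a texture.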
    void Direct3DInterop::ProcessFrame()
    {
        if (SwapFrames())
        {
            if (m_renderer)
            {
                cv::Mat* mat = m_frontFrame.get();

                switch (m_algorithm)
                {
                    case OCVFilterType::ePreview:
                    {
                        break;
                    }

                    case OCVFilterType::eGray:
                    {
                        ApplyGrayFilter(mat);
                        break;
                    }

                    case OCVFilterType::eCanny:
                    {
                        ApplyCannyFilter(mat);
                        break;
                    }

                    case OCVFilterType::eBlur:
                    {
                        ApplyBlurFilter(mat);
                        break;
                    }

                    case OCVFilterType::eFindFeatures:
                    {
                        ApplyFindFeaturesFilter(mat);
                        break;
                    }

                    case OCVFilterType::eSepia:
                    {
                        ApplySepiaFilter(mat);
                        break;
                    }
                }

                m_renderer->CreateTextureFromByte(mat->data, mat->cols, mat->rows);
            }
        }
    }

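    // Converts the frame to grayscale and back to a 4-channel image so the
    // output keeps the BGRA layout the renderer expects.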
    void Direct3DInterop::ApplyGrayFilter(cv::Mat* mat)
    {
        cv::Mat intermediateMat;
        cv::cvtColor(*mat, intermediateMat, CV_RGBA2GRAY);
        cv::cvtColor(intermediateMat, *mat, CV_GRAY2BGRA);
    }

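    // Runs Canny edge detection (low/high thresholds 80 and 90) and expands
    // the single-channel edge map back to BGRA.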
    void Direct3DInterop::ApplyCannyFilter(cv::Mat* mat)
    {
        cv::Mat intermediateMat;
        cv::Canny(*mat, intermediateMat, 80, 90);
        cv::cvtColor(intermediateMat, *mat, CV_GRAY2BGRA);
    }

    void Direct3DInterop::ApplyBlurFilter(cv::Mat* mat)
    {
        // Apply a simple in-place box blur; the 7x7 kernel size is an
        // illustrative choice, and the BGRA frame format is left unchanged.
        cv::blur(*mat, *mat, cv::Size(7, 7));
    }

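    // Detects FAST corners (threshold 50) on a grayscale copy of the frame and
    // draws a circle around up to 50 of the detected keypoints directly on the
    // original BGRA image.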
    void Direct3DInterop::ApplyFindFeaturesFilter(cv::Mat* mat)
    {
        cv::Mat intermediateMat;
        cv::Ptr<cv::FastFeatureDetector> detector = cv::FastFeatureDetector::create(50);
        std::vector<cv::KeyPoint> features;

        cv::cvtColor(*mat, intermediateMat, CV_RGBA2GRAY);
        detector->detect(intermediateMat, features);

        for (size_t i = 0; i < std::min(features.size(), (size_t)50); i++)
        {
            const cv::KeyPoint& kp = features[i];
            cv::circle(*mat, cv::Point((int)kp.pt.x, (int)kp.pt.y), 10, cv::Scalar(255, 0, 0, 255));
        }
    }

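    // Applies a sepia tone: cv::transform multiplies every BGRA pixel by the
    // 4x4 color matrix below, and the last row leaves the alpha channel
    // unchanged.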
    void Direct3DInterop::ApplySepiaFilter(cv::Mat* mat)
    {
        const float SepiaKernelData[16] =
        {
            /* B */ 0.131f, 0.534f, 0.272f, 0.f,
            /* G */ 0.168f, 0.686f, 0.349f, 0.f,
            /* R */ 0.189f, 0.769f, 0.393f, 0.f,
            /* A */ 0.000f, 0.000f, 0.000f, 1.f
        };

        const cv::Mat SepiaKernel(4, 4, CV_32FC1, (void*)SepiaKernelData);
        cv::transform(*mat, *mat, SepiaKernel);
    }

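    // Wraps this object in a Direct3DContentProvider and transfers ownership
    // of the COM reference to the returned WinRT interface (hence Detach()).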
    IDrawingSurfaceContentProvider^ Direct3DInterop::CreateContentProvider()
    {
        ComPtr<Direct3DContentProvider> provider = Make<Direct3DContentProvider>(this);
        return reinterpret_cast<IDrawingSurfaceContentProvider^>(provider.Detach());
    }

    // IDrawingSurfaceManipulationHandler
    void Direct3DInterop::SetManipulationHost(DrawingSurfaceManipulationHost^ manipulationHost)
    {
        manipulationHost->PointerPressed +=
            ref new TypedEventHandler<DrawingSurfaceManipulationHost^, PointerEventArgs^>(this, &Direct3DInterop::OnPointerPressed);

        manipulationHost->PointerMoved +=
            ref new TypedEventHandler<DrawingSurfaceManipulationHost^, PointerEventArgs^>(this, &Direct3DInterop::OnPointerMoved);

        manipulationHost->PointerReleased +=
            ref new TypedEventHandler<DrawingSurfaceManipulationHost^, PointerEventArgs^>(this, &Direct3DInterop::OnPointerReleased);
    }

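    // Property setter for the render resolution; when the size changes, the
    // renderer is resized and the synchronized texture is recreated.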
    void Direct3DInterop::RenderResolution::set(Windows::Foundation::Size renderResolution)
    {
        if (renderResolution.Width  != m_renderResolution.Width ||
            renderResolution.Height != m_renderResolution.Height)
        {
            m_renderResolution = renderResolution;

            if (m_renderer)
            {
                m_renderer->UpdateForRenderResolutionChange(m_renderResolution.Width, m_renderResolution.Height);
                RecreateSynchronizedTexture();
            }
        }
    }

    // Event Handlers

    void Direct3DInterop::OnPointerPressed(DrawingSurfaceManipulationHost^ sender, PointerEventArgs^ args)
    {
        // Insert your code here.
    }

    void Direct3DInterop::OnPointerMoved(DrawingSurfaceManipulationHost^ sender, PointerEventArgs^ args)
    {
        // Insert your code here.
    }

    void Direct3DInterop::OnPointerReleased(DrawingSurfaceManipulationHost^ sender, PointerEventArgs^ args)
    {
        // Insert your code here.
    }

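    // Opens the back camera at 640x480, sets a matching preview resolution,
    // and wires up the native preview and sample sinks; preview frames then
    // arrive in OnFrameAvailable as BGRA buffers.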
    void Direct3DInterop::StartCamera()
    {
        // Set the capture dimensions
        Size captureDimensions;
        captureDimensions.Width = 640;
        captureDimensions.Height = 480;

        // Open the AudioVideoCaptureDevice for video only
        IAsyncOperation<AudioVideoCaptureDevice^> ^openOperation = AudioVideoCaptureDevice::OpenForVideoOnlyAsync(CameraSensorLocation::Back, captureDimensions);

        openOperation->Completed = ref new AsyncOperationCompletedHandler<AudioVideoCaptureDevice^>(
            [this] (IAsyncOperation<AudioVideoCaptureDevice^> ^operation, Windows::Foundation::AsyncStatus status)
            {
                if (status == Windows::Foundation::AsyncStatus::Completed)
                {
                    auto captureDevice = operation->GetResults();

                    // Save the reference to the opened video capture device
                    pAudioVideoCaptureDevice = captureDevice;

                    // Retrieve the native ICameraCaptureDeviceNative interface from the managed video capture device
                    ICameraCaptureDeviceNative *iCameraCaptureDeviceNative = NULL;
                    HRESULT hr = reinterpret_cast<IUnknown*>(captureDevice)->QueryInterface(__uuidof(ICameraCaptureDeviceNative), (void**) &iCameraCaptureDeviceNative);

                    // Save the pointer to the native interface
                    pCameraCaptureDeviceNative = iCameraCaptureDeviceNative;

                    // Initialize the preview dimensions (see the accompanying article).
                    // The aspect ratio of the capture and preview resolutions must be equal:
                    // 4:3 for capture => 4:3 for preview, and 16:9 for capture => 16:9 for preview.
                    Size previewDimensions;
                    previewDimensions.Width = 640;
                    previewDimensions.Height = 480;

                    IAsyncAction^ setPreviewResolutionAction = pAudioVideoCaptureDevice->SetPreviewResolutionAsync(previewDimensions);
                    setPreviewResolutionAction->Completed = ref new AsyncActionCompletedHandler(
                        [this](IAsyncAction^ action, Windows::Foundation::AsyncStatus status)
                        {
                            HResult hr = action->ErrorCode;

                            if (status == Windows::Foundation::AsyncStatus::Completed)
                            {
                                // Create the preview sink and route its frames to this object
                                MakeAndInitialize<CameraCapturePreviewSink>(&pCameraCapturePreviewSink);
                                pCameraCapturePreviewSink->SetDelegate(this);
                                pCameraCaptureDeviceNative->SetPreviewSink(pCameraCapturePreviewSink);

                                // Set the preview format
                                pCameraCaptureDeviceNative->SetPreviewFormat(DXGI_FORMAT::DXGI_FORMAT_B8G8R8A8_UNORM);
                            }
                        }
                    );

                    // Retrieve the IAudioVideoCaptureDeviceNative native interface from the managed projection
                    IAudioVideoCaptureDeviceNative *iAudioVideoCaptureDeviceNative = NULL;
                    hr = reinterpret_cast<IUnknown*>(captureDevice)->QueryInterface(__uuidof(IAudioVideoCaptureDeviceNative), (void**) &iAudioVideoCaptureDeviceNative);

                    // Save the pointer to the IAudioVideoCaptureDeviceNative native interface
                    pAudioVideoCaptureDeviceNative = iAudioVideoCaptureDeviceNative;

                    // Set the sample encoding format to ARGB. See the documentation for further values.
                    pAudioVideoCaptureDevice->VideoEncodingFormat = CameraCaptureVideoFormat::Argb;

                    // Initialize and set the CameraCaptureSampleSink class as the sink for captured samples
                    MakeAndInitialize<CameraCaptureSampleSink>(&pCameraCaptureSampleSink);
                    pAudioVideoCaptureDeviceNative->SetVideoSampleSink(pCameraCaptureSampleSink);

                    // Start recording (the only way to receive samples through the ICameraCaptureSampleSink interface)
                    pAudioVideoCaptureDevice->StartRecordingToSinkAsync();
                }
            }
        );

    }
    // Interface With Direct3DContentProvider
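    // Called when the drawing surface host connects: create the renderer,
    // size it to the current window and render resolution, and start the
    // camera preview.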
    HRESULT Direct3DInterop::Connect(_In_ IDrawingSurfaceRuntimeHostNative* host)
    {
        m_renderer = ref new QuadRenderer();
        m_renderer->Initialize();
        m_renderer->UpdateForWindowSizeChange(WindowBounds.Width, WindowBounds.Height);
        m_renderer->UpdateForRenderResolutionChange(m_renderResolution.Width, m_renderResolution.Height);
        StartCamera();

        return S_OK;
    }

    void Direct3DInterop::Disconnect()
    {
        m_renderer = nullptr;
    }

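    // Called by the host before each draw: report whether new content is
    // available and, if so, run the OpenCV processing for the latest frame.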
    HRESULT Direct3DInterop::PrepareResources(_In_ const LARGE_INTEGER* presentTargetTime, _Out_ BOOL* contentDirty)
    {
        *contentDirty = m_contentDirty;
        if (m_contentDirty)
        {
            ProcessFrame();
        }
        m_contentDirty = false;
        return S_OK;
    }

    HRESULT Direct3DInterop::GetTexture(_In_ const DrawingSurfaceSizeF* size, _Out_ IDrawingSurfaceSynchronizedTextureNative** synchronizedTexture, _Out_ DrawingSurfaceRectF* textureSubRectangle)
    {
        m_renderer->Update();
        m_renderer->Render();
        return S_OK;
    }

    ID3D11Texture2D* Direct3DInterop::GetTexture()
    {
        return m_renderer->GetTexture();
    }
}