How to create Texture2D with CreateExternalTexture and a pointer passed from C++ library?

  c++, directx-11, textures, unity3d

I am trying to understand how to establish communication between C++ DirectX 11 code and the Unity engine.

What I am trying to do is create a very simple ID3D11Texture2D in C++ and then pass it to Unity, creating a Texture2D from it by means of the CreateExternalTexture method.

So I have a C++ DLL Visual Studio project with this one function:

ID3D11ShaderResourceView* GetTexture()
{
    ofstream myfile;
    myfile.open("GetTexture.log");

    const int width = 640;
    const int height = 480;

    unsigned char* pixels = new unsigned char[width * height * 4];
    unsigned char* pixelsP = pixels;

    int numPixels = width * height;

    ID3D11Texture2D* tex = nullptr;
    ID3D11ShaderResourceView* shaderView = nullptr;

    ID3D11Device* device = CreateDX11device();

    if (!device)
    {
        myfile << "Cannot create DX11 device" << endl;
        myfile.close();
        delete[] pixels;
        return nullptr;
    }
    else
    {
        myfile << "Successfully created DX11 device" << endl;
    }

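    // Fill every pixel with red (R = 255, G = 0, B = 0) and an alpha of 122; the layout is RGBA, 4 bytes per pixel.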
    for (int i = 0; i < numPixels; i++)
    {
        *(pixelsP) = 255;
        *(pixelsP + 1) = 0;
        *(pixelsP + 2) = 0;
        *(pixelsP + 3) = 122;
        pixelsP += 4;
    }

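    // Describe the initial texture contents: point at the pixel buffer, with rows width * 4 bytes apart.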
    D3D11_SUBRESOURCE_DATA initData = { 0 };
    initData.pSysMem = (const void*)pixels;
    initData.SysMemPitch = width * 4;
    initData.SysMemSlicePitch = width * height * 4;

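    // 640x480, a single mip level, RGBA8, dynamic usage so the CPU can write to it later via Map.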
    D3D11_TEXTURE2D_DESC desc = {};
    desc.Width = width;
    desc.Height = height;
    desc.MipLevels = 1;
    desc.ArraySize = 1;
    desc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    desc.SampleDesc.Count = 1;
    desc.Usage = D3D11_USAGE_DYNAMIC;               // also tried D3D11_USAGE_IMMUTABLE
    desc.BindFlags = D3D11_BIND_SHADER_RESOURCE;    // also tried | D3D11_BIND_RENDER_TARGET
    desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;

    HRESULT hr = device->CreateTexture2D(&desc, &initData, &tex);

    // CreateTexture2D copies the initial data into the texture, so the CPU-side buffer can be freed now.
    delete[] pixels;

    if (FAILED(hr))
    {
        myfile << "FAILED TO CREATE DX TEXTURE" << endl;
        myfile << hr << endl;
        myfile.close();
        return nullptr;
    }
    else
    {
        myfile << "Successfully Created DX Texture" << endl;
    }

    myfile.close();

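    // Create a shader resource view covering the texture's single mip level.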
    D3D11_SHADER_RESOURCE_VIEW_DESC shaderResourceViewDesc;
    ZeroMemory(&shaderResourceViewDesc, sizeof(shaderResourceViewDesc));
    
    shaderResourceViewDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM;
    shaderResourceViewDesc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
    shaderResourceViewDesc.Texture2D.MostDetailedMip = 0;
    shaderResourceViewDesc.Texture2D.MipLevels = 1;

    hr = device->CreateShaderResourceView(tex, &shaderResourceViewDesc, &shaderView);

    if (FAILED(hr))
    {
        return nullptr;
    }

    return shaderView;
}
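
The function is exported from the DLL so that it can be reached through P/Invoke. I have not included the exact export declaration here, so treat the following as a rough sketch of it (C linkage, with __stdcall to match the CallingConvention.StdCall in the C# code further down):

    // Export sketch (the exact declaration may differ): C linkage plus __stdcall so the
    // exported name and calling convention line up with the C# DllImport shown below.
    extern "C" __declspec(dllexport) ID3D11ShaderResourceView* __stdcall GetTexture();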

And here is how I create my DX11 device:

ID3D11Device* CreateDX11device()
{
    ID3D11Device* device = nullptr;
    HRESULT hr = S_OK;
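    // Note: D3D11_CREATE_DEVICE_DEBUG only succeeds when the D3D11 debug layer (SDK layers) is installed.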
    UINT createDeviceFlags = D3D11_CREATE_DEVICE_BGRA_SUPPORT | D3D11_CREATE_DEVICE_DEBUG;

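    // Note: this array is currently unused; the D3D11CreateDevice call below hardcodes D3D_DRIVER_TYPE_HARDWARE.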
    D3D_DRIVER_TYPE driverTypes[] = {
        D3D_DRIVER_TYPE_HARDWARE,
        D3D_DRIVER_TYPE_WARP,
        D3D_DRIVER_TYPE_REFERENCE,
    };

    UINT numDriverTypes = ARRAYSIZE(driverTypes);

    D3D_FEATURE_LEVEL featureLevels[] = {
        D3D_FEATURE_LEVEL_11_0,
        D3D_FEATURE_LEVEL_10_1,
        D3D_FEATURE_LEVEL_10_0,
        D3D_FEATURE_LEVEL_9_3,
        D3D_FEATURE_LEVEL_9_1
    };

    ID3D11DeviceContext* context = nullptr;

    UINT numFeatureLevels = ARRAYSIZE(featureLevels);
    
    hr = D3D11CreateDevice(nullptr,
        D3D_DRIVER_TYPE_HARDWARE,
        nullptr,
        createDeviceFlags,
        featureLevels,
        numFeatureLevels,
        D3D11_SDK_VERSION,
        &device,
        nullptr,
        &context);

    if (FAILED(hr)) {
        cout << "Could not create DX11 device" << endl;
        return nullptr;
    }

    cout << "DX Device Created" << endl;

    return device;
}
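
One note on the function above: the immediate context it obtains is never used or released. Dropping that local reference before returning would be a one-line addition (a sketch; the device keeps its own reference to its immediate context, so this does not destroy it):

    // Sketch: release the local reference to the immediate context before returning the device.
    if (context)
        context->Release();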

Now, the GetTexture function is simply called from C# code in Unity and the result is received as an IntPtr. I then create the Texture2D from that pointer and assign it to a RawImage to view the result in Unity:

[DllImport("TestDX11.dll", CallingConvention = CallingConvention.StdCall)]
public static extern IntPtr GetTexture();

void Start()
{
    IntPtr ptr = GetTexture();

    Texture2D texture = Texture2D.CreateExternalTexture(640, 480, TextureFormat.RGBA32, false, false, ptr);
    GetComponent<RawImage>().texture = texture;
}

The problem is that I expect to see a red image in the Unity app, but I see nothing: the texture is blank, as if I were not putting anything into it. To be sure, I tried logging the pointer received from the DLL, and it is non-null, so I do actually get a pointer to the texture. I feel quite lost at this point, as I have tried playing around with different flags during texture creation, but nothing has really helped.

I also tried to pass an ID3D11Texture2D* instead of the ID3D11ShaderResourceView*, as this link suggests, but that crashes Unity when the CreateExternalTexture method is called, for some reason.

I would appreciate any help!
