Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Is there a way for this to work with URP? #37

Open
rrahim opened this issue May 31, 2023 · 1 comment
Open

Is there a way for this to work with URP? #37

rrahim opened this issue May 31, 2023 · 1 comment

Comments

@rrahim
Copy link

rrahim commented May 31, 2023

I was initially confused when I added it to a project and it didn't work (no errors either).
I then ran the sample project and it worked fine.

Converted a test project of mine to Built In RP, and voila it works.

So I'm guessing this happens because URP is not supported — the documentation doesn't mention that limitation.

Has anyone found a way to support URP using UnityCapture?

@ShutoTakashita
Copy link

ShutoTakashita commented Sep 23, 2024

Hi,

I had the same problem. It was necessary to use the URP rendering pipeline to capture a single UI on two cameras.

I changed UnityCapture.cs as follows and it worked fine. This is code I had GPT-4 generate, so I don't fully understand the logic behind it, but I'm sharing it here because I think it will help anyone who just needs it to work.

 using UnityEngine;
 using System.Collections;

 [RequireComponent(typeof(Camera))]
 public class UnityCapture : MonoBehaviour
 {
public enum ECaptureDevice { CaptureDevice1 = 0, CaptureDevice2 = 1, CaptureDevice3 = 2, CaptureDevice4 = 3, CaptureDevice5 = 4, CaptureDevice6 = 5, CaptureDevice7 = 6, CaptureDevice8 = 7, CaptureDevice9 = 8, CaptureDevice10 = 9 }
public enum EResizeMode { Disabled = 0, LinearResize = 1 }
public enum EMirrorMode { Disabled = 0, MirrorHorizontally = 1 }
public enum ECaptureSendResult { SUCCESS = 0, WARNING_FRAMESKIP = 1, WARNING_CAPTUREINACTIVE = 2, ERROR_UNSUPPORTEDGRAPHICSDEVICE = 100, ERROR_PARAMETER = 101, ERROR_TOOLARGERESOLUTION = 102, ERROR_TEXTUREFORMAT = 103, ERROR_READTEXTURE = 104, ERROR_INVALIDCAPTUREINSTANCEPTR = 200 };

[SerializeField] [Tooltip("Capture device index")] public ECaptureDevice CaptureDevice = ECaptureDevice.CaptureDevice1;
[SerializeField] [Tooltip("Scale image if Unity and capture resolution don't match (can introduce frame dropping, not recommended)")] public EResizeMode ResizeMode = EResizeMode.Disabled;
[SerializeField] [Tooltip("How many milliseconds to wait for a new frame until sending is considered to be stopped")] public int Timeout = 1000;
[SerializeField] [Tooltip("Mirror captured output image")] public EMirrorMode MirrorMode = EMirrorMode.Disabled;
[SerializeField] [Tooltip("Introduce a frame of latency in favor of frame rate")] public bool DoubleBuffering = false;
[SerializeField] [Tooltip("Check to enable VSync during capturing")] public bool EnableVSync = false;
[SerializeField] [Tooltip("Set the desired render target frame rate")] public int TargetFrameRate = 60;
[SerializeField] [Tooltip("Check to disable output of warnings")] public bool HideWarnings = false;

Interface CaptureInterface;
Camera cam;
RenderTexture captureTexture;

void Awake()
{
    QualitySettings.vSyncCount = (EnableVSync ? 1 : 0);
    Application.targetFrameRate = TargetFrameRate;

    if (Application.runInBackground == false)
    {
        Debug.LogWarning("Application.runInBackground switched to enabled for capture streaming");
        Application.runInBackground = true;
    }
}

void Start()
{
    CaptureInterface = new Interface(CaptureDevice);

    cam = GetComponent<Camera>();

    // Create a RenderTexture with the same dimensions as the screen
    captureTexture = new RenderTexture(Screen.width, Screen.height, 24, RenderTextureFormat.Default);
    captureTexture.Create();

    // Assign the RenderTexture to the camera's targetTexture
    cam.targetTexture = captureTexture;

    // Start the coroutine to capture frames
    StartCoroutine(CaptureCoroutine());
}

void OnDestroy()
{
    CaptureInterface.Close();

    if (captureTexture != null)
    {
        cam.targetTexture = null;
        captureTexture.Release();
        Destroy(captureTexture);
    }
}

IEnumerator CaptureCoroutine()
{
    while (true)
    {
        // Wait for the end of the frame
        yield return new WaitForEndOfFrame();

        // Send the RenderTexture to the capture interface
        switch (CaptureInterface.SendTexture(captureTexture, Timeout, DoubleBuffering, ResizeMode, MirrorMode))
        {
            case ECaptureSendResult.SUCCESS: break;
            case ECaptureSendResult.WARNING_FRAMESKIP:
                if (!HideWarnings) Debug.LogWarning("[UnityCapture] Capture device did skip a frame read, capture frame rate will not match render frame rate.");
                break;
            case ECaptureSendResult.WARNING_CAPTUREINACTIVE:
                if (!HideWarnings) Debug.LogWarning("[UnityCapture] Capture device is inactive");
                break;
            case ECaptureSendResult.ERROR_UNSUPPORTEDGRAPHICSDEVICE:
                Debug.LogError("[UnityCapture] Unsupported graphics device (only D3D11 supported)");
                break;
            case ECaptureSendResult.ERROR_PARAMETER:
                Debug.LogError("[UnityCapture] Input parameter error");
                break;
            case ECaptureSendResult.ERROR_TOOLARGERESOLUTION:
                Debug.LogError("[UnityCapture] Render resolution is too large to send to capture device");
                break;
            case ECaptureSendResult.ERROR_TEXTUREFORMAT:
                Debug.LogError("[UnityCapture] Render texture format is unsupported (only basic non-HDR (ARGB32) and HDR (FP16/ARGB Half) formats are supported)");
                break;
            case ECaptureSendResult.ERROR_READTEXTURE:
                Debug.LogError("[UnityCapture] Error while reading texture image data");
                break;
            case ECaptureSendResult.ERROR_INVALIDCAPTUREINSTANCEPTR:
                Debug.LogError("[UnityCapture] Invalid Capture Instance Pointer");
                break;
        }
    }
}

public class Interface
{
    [System.Runtime.InteropServices.DllImport("UnityCapturePlugin")] extern static System.IntPtr CaptureCreateInstance(int CapNum);
    [System.Runtime.InteropServices.DllImport("UnityCapturePlugin")] extern static void CaptureDeleteInstance(System.IntPtr instance);
    [System.Runtime.InteropServices.DllImport("UnityCapturePlugin")] extern static ECaptureSendResult CaptureSendTexture(System.IntPtr instance, System.IntPtr nativetexture, int Timeout, bool UseDoubleBuffering, EResizeMode ResizeMode, EMirrorMode MirrorMode, bool IsLinearColorSpace);
    System.IntPtr CaptureInstance;

    public Interface(ECaptureDevice CaptureDevice)
    {
        CaptureInstance = CaptureCreateInstance((int)CaptureDevice);
    }

    ~Interface()
    {
        Close();
    }

    public void Close()
    {
        if (CaptureInstance != System.IntPtr.Zero) CaptureDeleteInstance(CaptureInstance);
        CaptureInstance = System.IntPtr.Zero;
    }

    public ECaptureSendResult SendTexture(Texture Source, int Timeout = 1000, bool DoubleBuffering = false, EResizeMode ResizeMode = EResizeMode.Disabled, EMirrorMode MirrorMode = EMirrorMode.Disabled)
    {
        if (CaptureInstance == System.IntPtr.Zero) return ECaptureSendResult.ERROR_INVALIDCAPTUREINSTANCEPTR;
        return CaptureSendTexture(CaptureInstance, Source.GetNativeTexturePtr(), Timeout, DoubleBuffering, ResizeMode, MirrorMode, QualitySettings.activeColorSpace == ColorSpace.Linear);
    }
}
 }

`

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

2 participants