I want to progressively stream a video from a URL using ExoPlayer, but display it in Unity3D. I first tested it in a native Android app using a SurfaceView, and it worked great. However, I need to package this functionality into an AAR library and use it as a plugin for Unity. I'm very close to getting it working (or so it seems), but I know my lack of OpenGL knowledge is what's holding me back. Should the texture be created on the Unity side or on the plugin side? When and how should it be recreated? When should it be destroyed?
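
For context, the working native test looked roughly like the sketch below. This is only a minimal reconstruction (the activity name and exact setup are illustrative, reusing the same ExoPlayer 2.x calls as the plugin code further down); the point is that ExoPlayer renders straight into a SurfaceView, so no manual GL texture handling was needed there.

import android.app.Activity;
import android.net.Uri;
import android.os.Bundle;
import android.os.Handler;
import android.view.SurfaceView;

import com.google.android.exoplayer2.DefaultLoadControl;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.extractor.DefaultExtractorsFactory;
import com.google.android.exoplayer2.source.ExtractorMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.upstream.DefaultDataSourceFactory;
import com.google.android.exoplayer2.util.Util;

public class NativeExoPlayerTestActivity extends Activity {
    private SimpleExoPlayer player;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // ExoPlayer drives the SurfaceView directly, so there is no GL code here.
        SurfaceView surfaceView = new SurfaceView(this);
        setContentView(surfaceView);

        // Same constructor usage as in the plugin code below.
        player = ExoPlayerFactory.newSimpleInstance(this,
                new DefaultTrackSelector(new Handler(), null), new DefaultLoadControl());

        DataSource.Factory dataSourceFactory = new DefaultDataSourceFactory(this,
                Util.getUserAgent(this, "ExoTest"), new DefaultBandwidthMeter());
        MediaSource videoSource = new ExtractorMediaSource(
                Uri.parse("http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4"),
                dataSourceFactory, new DefaultExtractorsFactory(), null, null);

        player.setVideoSurfaceView(surfaceView);
        player.prepare(videoSource);
        player.setPlayWhenReady(true);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        player.release();
    }
}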

I've tried several combinations and all they show is a black texture; the audio works in every case. The code is a mess because right now I'm trying everything I can think of. I haven't removed all the comments from the code, so you can see the other approaches I've used, and I've also left some questions in the comments. I know the solution requires Unity, ExoPlayer, OpenGL and Android knowledge all at once, so I also posted this on the Unity Android forum and linked the posts to each other. Hopefully I can combine the answers I get here with the ones there and come up with a solution.

Android side:

public class ExoPlayerForUnity {
    static SurfaceTexture surfaceTexture;
    static Surface surface;

    static Context getUnityContext() {
        try {
            Class<?> unityPlayerClass = Class.forName("com.unity3d.player.UnityPlayer");
            Field currentActivity = unityPlayerClass.getField("currentActivity");
            Object o = currentActivity.get(null);
            return (Context)o;
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (NoSuchFieldException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }

        return null;
    }


    public static int init( String uriString) {

        Uri videoURI = Uri.parse(uriString);

        final Activity unityContext = (Activity)getUnityContext();

        final Handler handler = new Handler();
//        BandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
//        TrackSelection.Factory videoTrackSelectionFactory = new AdaptiveVideoTrackSelection.Factory(bandwidthMeter);
//        TrackSelection.Factory videoTrackSelectionFactory = new FixedTrackSelection.Factory();
//        TrackSelector trackSelector = new DefaultTrackSelector(mainHandler, videoTrackSelectionFactory);
        TrackSelector trackSelector = new DefaultTrackSelector(handler, null);
        LoadControl loadControl = new DefaultLoadControl();
        final SimpleExoPlayer player = ExoPlayerFactory.newSimpleInstance(unityContext, trackSelector, loadControl, null, false);

        DefaultBandwidthMeter defBandwidthMeter = new DefaultBandwidthMeter();
        DataSource.Factory dataSourceFactory = new DefaultDataSourceFactory(unityContext,
                Util.getUserAgent(unityContext, "yourApplicationName"), defBandwidthMeter);
        ExtractorsFactory extractorsFactory = new DefaultExtractorsFactory();
        MediaSource videoSource = new ExtractorMediaSource(videoURI,
                dataSourceFactory, extractorsFactory, new Handler(), new ExtractorMediaSource.EventListener() {
            @Override
            public void onLoadError(IOException error) {
                error.printStackTrace();
            }
        });


        // Maybe useful?: http://stackoverflow.com/questions/33324753/how-to-use-unity-createexternaltexture-on-android
        int[] textures = new int[1];
        GLES20.glGenTextures(1, textures, 0);
        final int texture = textures[0];

//        //GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
//        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texture);
//        //GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGB, 1024, 512, 0, GLES20.GL_RGB, GLES20.GL_UNSIGNED_BYTE, null);
//        GLES20.glTexParameteri( GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR );
//        GLES20.glTexParameteri( GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR );
//        GLES20.glTexParameteri( GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE );
//        GLES20.glTexParameteri( GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE );
//        GLES20.glBindTexture( GLES20.GL_TEXTURE_2D, 0 );

        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);
        GLES20.glTexParameteri( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR );
        GLES20.glTexParameteri( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR );
        GLES20.glTexParameteri( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE );
        GLES20.glTexParameteri( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE );
        GLES20.glBindTexture( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0 );



        // TODO work with attachToGLContext maybe?
        surfaceTexture = new SurfaceTexture(texture);

        surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
            @Override
            public void onFrameAvailable(final SurfaceTexture sf) {
                handler.post(new Runnable() {
                    @Override
                    public void run() {
                        // Should I use this or update it in Unity-side with tex.UpdateTextureExternal(tex.GetNativeTexturePtr()) ?
                        //GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, texture);
                        //sf.updateTexImage();
                        //GLES20.glBindTexture( GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0 );
                    }
                });
            }
        });

        surface = new Surface(surfaceTexture);

        Timer t = new Timer();
        t.schedule(
                new TimerTask() {
                    @Override
                    public void run() {
                        Log.v("ExoPlayerForUnity", "buff: " + player.getBufferedPercentage());
                    }
                }, 3000, 1000
        );

        player.setVideoSurface(surface);
        player.prepare(videoSource);
        player.setPlayWhenReady(true);

        return texture;
    }
}
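
To make the attachToGLContext / onFrameAvailable questions in the TODO comments above concrete: one direction would be to only flag the new frame in onFrameAvailable and let Unity call a separate method from the thread that actually owns the GL context, so that updateTexImage() runs there. The sketch below shows just that idea; updateTexture and frameAvailable are made-up names and I haven't verified that this works.

// Possible additions to the ExoPlayerForUnity class above (unverified sketch).
static volatile boolean frameAvailable;

// In init(), the OnFrameAvailableListener would only record that a frame is pending,
// instead of posting to a Handler thread that has no GL context:
//     surfaceTexture.setOnFrameAvailableListener(new SurfaceTexture.OnFrameAvailableListener() {
//         @Override
//         public void onFrameAvailable(SurfaceTexture sf) {
//             frameAvailable = true;
//         }
//     });

// Intended to be called from Unity on the thread that owns the GL context the texture
// was created in (e.g. with multithreaded rendering disabled, or from a native render
// event), so updateTexImage() latches the newest decoded frame into the OES texture.
public static void updateTexture() {
    if (surfaceTexture != null && frameAvailable) {
        frameAvailable = false;
        surfaceTexture.updateTexImage();
    }
}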

Unity side:

public class ExoTestPlugin : MonoBehaviour
{
    new Renderer renderer;
    Texture2D rt;
    //RenderTexture rt; // maybe this?
    AndroidJavaClass jc;
    int texName;
    float curScale = 0f;

    void init()
    {
        renderer = GetComponent<Renderer>();
        Camera.onPreRender += OnPreRender2;

        jc = new AndroidJavaClass("com.thefallengames.exoplayerforunity.ExoPlayerForUnity");

        texName = jc.CallStatic<int>("init", "http://clips.vorwaerts-gmbh.de/big_buck_bunny.mp4");


        //rt = new Texture2D(640, 360, TextureFormat.RGBA32, true, true);
        //rt = new Texture2D(640, 360, TextureFormat.ETC2_RGBA8, true, true);
        //rt = new RenderTexture(640, 360, 24, RenderTextureFormat.Default, RenderTextureReadWrite.Default);
        //rt.Create();

        rt = Texture2D.CreateExternalTexture(1024, 512, TextureFormat.ETC_RGB4, true, true, (IntPtr)texName);

        renderer.material.mainTexture = rt;
    }

    void Update () {

        // Here I'm just starting the playback on first tap and then toggling between 2x and 1x scale on any subsequent taps
        if (Input.touchCount > 0 && Input.GetTouch(0).phase == TouchPhase.Ended)
        {
            if (curScale == 0f)
            {
                curScale = 1f;
                init();

                return;
            }

            if (curScale == 2f)
                curScale = .5f;
            else
                curScale = 2f;
            transform.localScale *= curScale;
        }
    }

    void OnPreRender2(Camera cam)
    {
        if (jc == null) // not inited
            return;

        if (UnityEngine.Random.Range(0, 60) != 0) // updating ~once per second only, just for debugging
            return;

        rt.UpdateExternalTexture((IntPtr)texName);
    }
}

EDIT: I also checked this post, but it only says it's possible; it doesn't give any useful details.
Please note that I also tried creating the texture on the Unity side and passing the pointer to the Java side, but that didn't seem to work either.
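
For reference, the Java side of that last attempt looked roughly like this (a sketch only; the method name is made up and the player setup, which is identical to init() above, is elided):

// Variant of init() for the "create the texture in Unity, pass the pointer to Java" attempt.
// unityTextureId is the value Unity gets from Texture2D.GetNativeTexturePtr(), cast to int.
public static void initWithUnityTexture(int unityTextureId, String uriString) {
    // Wrap the Unity-created GL texture name in a SurfaceTexture and give its Surface
    // to ExoPlayer; no glGenTextures call on the Java side in this variant.
    surfaceTexture = new SurfaceTexture(unityTextureId);
    surface = new Surface(surfaceTexture);

    // ... the trackSelector / loadControl / videoSource / player setup is exactly the same
    // as in init() above, ending with:
    // player.setVideoSurface(surface);
    // player.prepare(videoSource);
    // player.setPlayWhenReady(true);
}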