Using GStreamer with Ogre

How to set up a GStreamer playbin2 element to stream video content to an Ogre texture

Introduction

The following snippet demonstrates how to set up a GStreamer playbin2 element to stream video content to an Ogre texture (which is displayed via an overlay). It has been tested on Ubuntu 10.04, using Ogre 1.7 and GStreamer 0.10: various video files played back well, and DVD input worked too, though it seemed a bit temperamental.

Note that I may not be releasing references to GStreamer objects correctly, and threading may not be configured or used properly with Ogre. I just don't have much experience in these areas, so if someone who does thinks they could fix up the code, I encourage them to add their changes to this page.

Note the include files at the top. You'll find SdkCameraMan.h and SdkTrays.h in the Ogre samples (Samples/Common/include in the source tree).
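
Before diving into the full listing, here is a minimal sketch of just the GStreamer side of the technique, with all the Ogre code stripped out. This is only a sketch under the same assumptions as the snippet (GStreamer 0.10 and its appsink API); the URI is a placeholder, and it prints frame sizes rather than copying frames into a texture.

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

int main(int argc, char** argv)
{
    gst_init(&argc, &argv);

    // playbin2 decodes the media; an appsink replaces the window that
    // playbin2 would otherwise create for video output.
    GstElement* player = gst_element_factory_make("playbin2", "player");
    GstElement* sink = gst_element_factory_make("appsink", "sink");

    // Only accept raw RGB frames (playbin2 converts to match), and drop
    // stale frames rather than queueing them.
    GstCaps* caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);
    g_object_set(G_OBJECT(sink), "max-buffers", 1, "drop", TRUE, NULL);

    // Placeholder URI; point this at a real file.
    g_object_set(G_OBJECT(player),
            "video-sink", sink,
            "uri", "file:///tmp/test.ogv",
            NULL);

    gst_element_set_state(player, GST_STATE_PLAYING);

    // Pull a few decoded frames; each GstBuffer holds one raw video frame.
    for (int i = 0; i < 10; ++i)
    {
        GstBuffer* buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
        if (!buffer)
            break;  // NULL means end of stream.
        g_print("frame of %u bytes\n", GST_BUFFER_SIZE(buffer));
        gst_buffer_unref(buffer);
    }

    gst_element_set_state(player, GST_STATE_NULL);
    gst_object_unref(player);
    return 0;
}

The full snippet below does the same wiring, but wraps the appsink in a bin behind a capsfilter, listens for the "new-buffer" signal instead of blocking in a loop, and copies each frame into an Ogre texture.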

Source code

#include <iostream>

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <ExampleApplication.h>

#include "SdkCameraMan.h"
#include "SdkTrays.h"

// Declare a subclass of the ExampleApplication class
class MediaPlayer : public ExampleApplication, public OgreBites::SdkTrayListener
{
private:
    //--------------------------------------------------------------------------
    // Declare a subclass of the ExampleFrameListener class
    class Listener : public ExampleFrameListener,
        public OIS::MouseListener,public OIS::KeyListener
    {
        friend class MediaPlayer;

    private:
        MediaPlayer* mMediaPlayer;

    public:
        //--------------------------------------------------------------------------
        Listener(RenderWindow* win, Camera* cam, MediaPlayer* mediaPlayer) :
            ExampleFrameListener(win, cam, true, true),
            mMediaPlayer(mediaPlayer)
        {
            mMouse->setEventCallback(this);
            mKeyboard->setEventCallback(this);
        }

        //--------------------------------------------------------------------------
        bool frameStarted(const FrameEvent& evt)
        {
            if(mMouse)
                mMouse->capture();

            if(mKeyboard)
                mKeyboard->capture();

            return mMediaPlayer->mRunning &&
                ExampleFrameListener::frameStarted(evt);
        }

        //--------------------------------------------------------------------------
        bool frameEnded(const FrameEvent& evt)
        {
            return ExampleFrameListener::frameEnded(evt);
        }

        //--------------------------------------------------------------------------
        bool frameRenderingQueued(const FrameEvent& evt)
        {
            // For whatever reason, textures can only be updated dynamically
            // from within this function.
            mMediaPlayer->updateVideo();

            return ExampleFrameListener::frameRenderingQueued(evt);
        }

        //--------------------------------------------------------------------------
        bool mousePressed(const OIS::MouseEvent& evt, OIS::MouseButtonID id)
        {
            if (mMediaPlayer->mTrayManager &&
                    mMediaPlayer->mTrayManager->injectMouseDown(evt, id))
            {
                return true;
            }

            mMediaPlayer->mCameraMan->injectMouseDown(evt, id);

            return true;
        }

        //--------------------------------------------------------------------------
        bool mouseReleased(const OIS::MouseEvent& evt, OIS::MouseButtonID id)
        {
            if (mMediaPlayer->mTrayManager &&
                    mMediaPlayer->mTrayManager->injectMouseUp(evt, id))
            {
                return true;
            }

            mMediaPlayer->mCameraMan->injectMouseUp(evt, id);

            return true;
        }

        //--------------------------------------------------------------------------
        bool mouseMoved(const OIS::MouseEvent& evt)
        {
            if (mMediaPlayer->mTrayManager &&
                    mMediaPlayer->mTrayManager->injectMouseMove(evt))
            {
                return true;
            }

            mMediaPlayer->mCameraMan->injectMouseMove(evt);

            return true;
        }

        //--------------------------------------------------------------------------
        bool keyPressed(const OIS::KeyEvent &evt)
        {
            switch (evt.key)
            {
            case OIS::KC_ESCAPE:
                mMediaPlayer->mRunning = false;
                break;

            case OIS::KC_SPACE:
                mMediaPlayer->togglePlaying();
                break;

            default:
                break;
            }

            mMediaPlayer->mCameraMan->injectKeyDown(evt);

            return true;
        }

        //--------------------------------------------------------------------------
        bool keyReleased(const OIS::KeyEvent &evt)
        {
            mMediaPlayer->mCameraMan->injectKeyUp(evt);

            return true;
        }

    };

private:
    Listener* mListener;

    GstElement* mPlayer;
    GstElement* mAppSink;

    OgreBites::SdkCameraMan* mCameraMan;
    OgreBites::SdkTrayManager* mTrayManager;
    Overlay* mVideoOverlay;
    TexturePtr mVideoTexture;
    MaterialPtr mVideoMaterial;
    HardwarePixelBufferSharedPtr mTextureBuffer;

    bool mRunning;
    bool mPlaying;
    bool mNewBufferExists;
    int mVideoWidth;
    int mVideoHeight;

public:
    MediaPlayer() :
        mListener(0),
        mPlayer(0),
        mAppSink(0),
        mCameraMan(0),
        mTrayManager(0),
        mVideoOverlay(0),
        mVideoTexture(),
        mVideoMaterial(),
        mTextureBuffer(),
        mRunning(true),
        mPlaying(false),
        mNewBufferExists(false),
        mVideoWidth(-1),
        mVideoHeight(-1)
    {
    }

protected:
    // Define what is in the scene
    void createScene(void)
    {
        // Setup the camera and GUI.
        mCameraMan = new OgreBites::SdkCameraMan(mCamera);
        mCameraMan->setStyle(OgreBites::CS_ORBIT);

        // Setup basic lighting.
        mSceneMgr->setAmbientLight(ColourValue(0.3, 0.3, 0.3));
        mSceneMgr->createLight()->setPosition(20, 80, 50);

        // Create a temporary texture (otherwise the overlay will screw up).
        mVideoTexture = Ogre::TextureManager::getSingleton().createManual(
                        "VideoTexture",
                        ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
                        TEX_TYPE_2D,
                        1, 1,
                        0, PF_B8G8R8A8,
                        TU_DYNAMIC_WRITE_ONLY);

        // Create the material to apply to the overlay.
        mVideoMaterial = MaterialManager::getSingleton().create(
                "VideoMaterial",
                ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME);
        Ogre::Technique* technique = mVideoMaterial->createTechnique();
        technique->createPass();
        mVideoMaterial->getTechnique(0)->getPass(0)->createTextureUnitState(
                mVideoTexture->getName());

        // Create the overlay.
        mVideoOverlay = OverlayManager::getSingleton().create("overlay");
        OverlayContainer* videoPanel = static_cast<OverlayContainer*>(
                OverlayManager::getSingleton().createOverlayElement(
                        "Panel", "videoPanel"));

        mVideoOverlay->add2D(videoPanel);
        mVideoOverlay->show();
        videoPanel->setMaterialName(mVideoMaterial->getName());


        // Initialise GStreamer.
        gst_init(0, 0);

        // I'm not entirely sure how threading works in GStreamer,
        // I just hope that it does :P.
        if (!g_thread_supported())
        {
           g_thread_init(0);
        }

        // This is the path to the media file.
        const char* uri = "file:///home/fred/videos/myvid.mov"; //"dvd://";

        // playbin2 is an all-in-one audio and video player.
        // It should be good enough for most uses.
        mPlayer = gst_element_factory_make("playbin2", "player");

        // Listen for messages on the playbin pipeline bus.
        GstBus* bus = gst_pipeline_get_bus(GST_PIPELINE(mPlayer));
        gst_bus_add_watch(bus, onBusMessage, getUserData(mPlayer));
        gst_object_unref(bus);
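        // Note that a bus watch is dispatched from a GLib main loop.  Ogre
        // runs its own render loop here, so unless something iterates the
        // default GMainContext these callbacks may never fire; polling the
        // bus (e.g. with gst_bus_pop()) from the frame listener would be a
        // more robust alternative.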

        // By default, playbin creates its own window to which it streams
        // video output.  We create an appsink element to allow video to be
        // streamed to an Ogre texture instead.
        mAppSink = gst_element_factory_make("appsink", "app_sink");

        // Set the appsink to emit signals (so we know when a new frame has
        // arrived), and to drop frames instead of buffer them (we want a
        // realtime video player).
        g_object_set(G_OBJECT(mAppSink), "emit-signals", TRUE, NULL);
        g_object_set(G_OBJECT(mAppSink), "max-buffers", 1, NULL);
        g_object_set(G_OBJECT(mAppSink), "drop", TRUE, NULL);

        // Listen for new-buffer signals.
        g_signal_connect(G_OBJECT(mAppSink), "new-buffer",
                G_CALLBACK(onNewBuffer), this);

        // Create a filter to produce simple rgb (actually, bgra) data.
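        // (playbin2's internal sink bin includes a colour-space converter,
        // so forcing raw RGB caps here should be enough to make decoded
        // frames arrive as BGRA.)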
        GstCaps* caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
        GstElement* rgbFilter = gst_element_factory_make("capsfilter", "rgb_filter");
        g_object_set(G_OBJECT(rgbFilter), "caps", caps, NULL);
        gst_caps_unref(caps);

        // Create a bin to combine the rgb conversion with the appsink.
        GstElement* appBin = gst_bin_new("app_bin");

        // Add the filter to the bin, then attach a ghostpad to allow the
        // output of the filter to connect to the input of the appsink.
        gst_bin_add(GST_BIN(appBin), rgbFilter);
        GstPad* rgbSinkPad = gst_element_get_static_pad(rgbFilter, "sink");
        GstPad* ghostPad = gst_ghost_pad_new("app_bin_sink", rgbSinkPad);
        gst_object_unref(rgbSinkPad);
        gst_element_add_pad(appBin, ghostPad);

        // Add the appsink to the bin.
        gst_bin_add_many(GST_BIN(appBin), mAppSink, NULL);
        gst_element_link_many(rgbFilter, mAppSink, NULL);

        // Replace the default window sink with our appsink, and set the
        // media file to play.
        g_object_set(G_OBJECT(mPlayer), "video-sink", appBin, NULL);
        g_object_set(G_OBJECT(mPlayer), "uri", uri, NULL);

        // Start playing the file.
        togglePlaying();
    }

    void destroyScene()
    {
       if (mPlayer)
       {
           gst_element_set_state(GST_ELEMENT(mPlayer), GST_STATE_NULL);
           gst_object_unref(mPlayer);
       }

       if (mCameraMan)
       {
           delete mCameraMan;
       }

       if (mTrayManager)
       {
           delete mTrayManager;
       }
    }

    // Create new frame listener
    void createFrameListener(void)
    {
        mFrameListener = mListener = new Listener(mWindow, mCamera, this);
        mRoot->addFrameListener(mFrameListener);

        mTrayManager = new OgreBites::SdkTrayManager("tray_man", mWindow,
                mListener->mMouse, this);
        mTrayManager->showCursor();
    }

    void togglePlaying()
    {
        if (mPlaying)
        {
            gst_element_set_state(GST_ELEMENT(mPlayer), GST_STATE_PAUSED);
        }
        else
        {
            gst_element_set_state(GST_ELEMENT(mPlayer), GST_STATE_PLAYING);
        }

        mPlaying = !mPlaying;
    }


    //--------------------------------------------------------------------------
    void updateVideo()
    {
        if (mNewBufferExists)
        {
            // Get the new buffer.
            GstBuffer* buffer;
            g_signal_emit_by_name(mAppSink, "pull-buffer", &buffer);
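            // ("pull-buffer" is appsink's action signal; this is equivalent
            // to calling gst_app_sink_pull_buffer() directly.)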

            // Obtain the dimensions of the video.
            GstCaps* caps = gst_buffer_get_caps(buffer);

            int width = 0;
            int height = 0;
            int ratioNum;
            int ratioDen;
            float pixelRatio = 1.0;

            for (size_t i = 0; i < gst_caps_get_size(caps); ++i)
            {
                GstStructure* structure = gst_caps_get_structure(caps, i);

                gst_structure_get_int(structure, "width", &width);
                gst_structure_get_int(structure, "height", &height);

                if (gst_structure_get_fraction(structure, "pixel-aspect-ratio",
                        &ratioNum, &ratioDen))
                {
                    pixelRatio = ratioNum / static_cast<float>(ratioDen);
                }
            }

            // Release the caps reference taken by gst_buffer_get_caps().
            gst_caps_unref(caps);

            // If the video has changed dimensions (or only just started
            // streaming), create a new texture to match.
            if (width && height && (width != mVideoWidth || height != mVideoHeight))
            {
                mVideoWidth = width;
                mVideoHeight = height;

                TextureManager* mgr = Ogre::TextureManager::getSingletonPtr();

                // Delete old texture if it exists.
                if (!mVideoTexture.isNull())
                {
                    mgr->remove(mVideoTexture->getName());
                }

                // Create new texture with updated dimensions.
                mVideoTexture = Ogre::TextureManager::getSingleton().createManual(
                                "VideoTexture",
                                ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME,
                                TEX_TYPE_2D,
                                mVideoWidth, mVideoHeight,
                                0, PF_B8G8R8A8,
                                TU_DYNAMIC_WRITE_ONLY);

                // Apply texture to the material.
                mVideoMaterial->getTechnique(0)->getPass(0)->removeAllTextureUnitStates();
                mVideoMaterial->getTechnique(0)->getPass(0)->createTextureUnitState(
                        mVideoTexture->getName());

                // Calculate scaling factor for overlay screen (we don't want
                // a stretched video!)
                float widthRatio = mVideoWidth / static_cast<float>(mWindow->getWidth()) * pixelRatio;
                float heightRatio = mVideoHeight / static_cast<float>(mWindow->getHeight());
                float scale = widthRatio > heightRatio ? widthRatio : heightRatio;

                mVideoOverlay->setScale(widthRatio / scale, heightRatio / scale);
            }

            // Lock texture buffer, copy new frame data, then unlock again.
            HardwarePixelBufferSharedPtr pixelBuffer = mVideoTexture->getBuffer();
            void* textureData = pixelBuffer->lock(HardwareBuffer::HBL_DISCARD);
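            // Note: this copy assumes the locked buffer's row pitch matches
            // the video's row size (width * 4 bytes); if the render system
            // pads rows, the frame would need to be copied row by row.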
            memcpy(textureData, GST_BUFFER_DATA(buffer), GST_BUFFER_SIZE(buffer));
            pixelBuffer->unlock();

            // Release the GStreamer buffer data, and reset the flag.
            gst_buffer_unref(buffer);
            mNewBufferExists = false;
        }
    }

    //--------------------------------------------------------------------------
    static GstElement* getPlayer(gpointer userData)
    {
       return reinterpret_cast<GstElement*>(userData);
    }

    //--------------------------------------------------------------------------
    static void* getUserData(GstElement* player)
    {
       return reinterpret_cast<void*>(player);
    }

    //--------------------------------------------------------------------------
    // Called by GStreamer when a new frame is available.
    // Data is processed within FrameListener::frameRenderingQueued.
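    // Note: this callback fires on a GStreamer streaming thread, not the
    // render thread, which is why it only sets a flag; updateVideo() later
    // pulls the buffer from the render thread.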
    static GstFlowReturn onNewBuffer(GstAppSink *sink, gpointer userData)
    {
       MediaPlayer* mediaPlayer = reinterpret_cast<MediaPlayer*>(userData);
       assert(mediaPlayer);

       mediaPlayer->mNewBufferExists = true;

       return GST_FLOW_OK;
    }

    //--------------------------------------------------------------------------
    static gboolean onBusMessage(
           GstBus* bus, GstMessage* message, gpointer userData)
    {
       GstElement* player = getPlayer(userData);

       switch (GST_MESSAGE_TYPE(message))
       {
       case GST_MESSAGE_EOS:
           std::cout << "End of stream" << std::endl;
           gst_element_set_state(GST_ELEMENT(player), GST_STATE_NULL);
           break;

       case GST_MESSAGE_ERROR:
       {
           GError* error = 0;
           gst_message_parse_error(message, &error, 0);  // report the actual error
           std::cout << "Error: " << error->message << std::endl;
           g_error_free(error);
           gst_element_set_state(GST_ELEMENT(player), GST_STATE_NULL);
           break;
       }

       default:
           break;
       }

       return true;
    }

};

#ifdef __cplusplus
extern "C" {
#endif

#if OGRE_PLATFORM == OGRE_PLATFORM_WIN32
#define WIN32_LEAN_AND_MEAN
#include "windows.h"
INT WINAPI WinMain(HINSTANCE hInst, HINSTANCE, LPSTR strCmdLine, INT)
#else
int main(int argc, char **argv)
#endif
{
    // Instantiate our subclass
    MediaPlayer myApp;

    try {
        // ExampleApplication provides a go method, which starts the rendering.
        myApp.go();
    }
    catch (Ogre::Exception& e) {
#if OGRE_PLATFORM == OGRE_PLATFORM_WIN32
        MessageBoxA(NULL, e.getFullDescription().c_str(), "An exception has occurred!", MB_OK | MB_ICONERROR | MB_TASKMODAL);
#else
        std::cerr << "Exception:\n";
        std::cerr << e.getFullDescription().c_str() << "\n";
#endif
        return 1;
    }

    return 0;
}

#ifdef __cplusplus
}
#endif