//
//  libavg - Media Playback Engine. 
//  Copyright (C) 2003-2014 Ulrich von Zadow
//
//  This library is free software; you can redistribute it and/or
//  modify it under the terms of the GNU Lesser General Public
//  License as published by the Free Software Foundation; either
//  version 2 of the License, or (at your option) any later version.
//
//  This library is distributed in the hope that it will be useful,
//  but WITHOUT ANY WARRANTY; without even the implied warranty of
//  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
//  Lesser General Public License for more details.
//
//  You should have received a copy of the GNU Lesser General Public
//  License along with this library; if not, write to the Free Software
//  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
//
//  Current versions can be found at www.libavg.de
//

#ifndef _VideoNode_H_
#define _VideoNode_H_

// Python docs say python.h should be included before any standard headers (!)
#include "../api.h"
#include "WrapPython.h" 

#include "Node.h"
#include "RasterNode.h"

#include "../base/GLMHelper.h"
#include "../base/IFrameEndListener.h"
#include "../base/UTF8String.h"

#include "../video/VideoDecoder.h"

namespace avg {

class VideoDecoder;
class TextureMover;
typedef boost::shared_ptr<TextureMover> TextureMoverPtr;

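// VideoNode plays back video files as a RasterNode. It provides play/stop/pause
// control, frame- and time-based seeking, query functions for stream metadata
// (container format, codecs, bitrate, durations), optional threaded decoding and
// hardware (VDPAU) acceleration, and an end-of-file callback into Python.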
class AVG_API VideoNode: public RasterNode, IFrameEndListener
{
    public:
        enum VideoAccelType {NONE, VDPAU};

        static void registerType();
        
        VideoNode(const ArgList& args);
        virtual ~VideoNode();
        
        virtual void connectDisplay();
        virtual void connect(CanvasPtr pCanvas);
        virtual void disconnect(bool bKill);

        void play();
        void stop();
        void pause();

        const UTF8String& getHRef() const;
        void setHRef(const UTF8String& href);
        float getVolume();
        void setVolume(float volume);
        float getFPS() const;
        int getQueueLength() const;
        void checkReload();

        int getNumFrames() const;
        int getCurFrame() const;
        int getNumFramesQueued() const;
        void seekToFrame(int frameNum);
        std::string getStreamPixelFormat() const;
        long long getDuration() const;
        long long getVideoDuration() const;
        long long getAudioDuration() const;
        int getBitrate() const;
        std::string getContainerFormat() const;
        std::string getVideoCodec() const;
        std::string getAudioCodec() const;
        int getAudioSampleRate() const;
        int getNumAudioChannels() const;

        long long getCurTime() const;
        void seekToTime(long long time);
        bool getLoop() const;
        bool isThreaded() const;
        bool hasAudio() const;
        bool hasAlpha() const;
        void setEOFCallback(PyObject * pEOFCallback);
        bool isAccelerated() const;

        virtual void preRender(const VertexArrayPtr& pVA, bool bIsParentActive, 
                float parentEffectiveOpacity);
        virtual void render();
        virtual void onFrameEnd();
        
        virtual IntPoint getMediaSize();

        static VideoAccelType getVideoAccelConfig();

    private:
        bool renderFrame();
        void seek(long long destTime);
        void onEOF();
        void updateStatusDueToDecoderEOF();
        void dumpFramesTooLate();

        void open();
        void startDecoding();
        void createTextures(IntPoint size);
        void close();
        enum VideoState {Unloaded, Paused, Playing};
        void changeVideoState(VideoState NewVideoState);
        PixelFormat getPixelFormat() const;
        long long getNextFrameTime() const;
        void exceptionIfNoAudio(const std::string& sFuncName) const;
        void exceptionIfUnloaded(const std::string& sFuncName) const;

        VideoState m_VideoState;

        bool m_bFrameAvailable;
        bool m_bFirstFrameDecoded;

        UTF8String m_href;
        std::string m_Filename;
        bool m_bLoop;
        bool m_bThreaded;
        float m_FPS;
        int m_QueueLength;
        bool m_bEOFPending;
        PyObject * m_pEOFCallback;
        int m_FramesTooLate;
        int m_FramesInRowTooLate;
        int m_FramesPlayed;
        bool m_bSeekPending;
        long long m_SeekBeforeCanRenderTime;

        long long m_StartTime;
        long long m_PauseTime;
        long long m_PauseStartTime;
        float m_JitterCompensation;

        VideoDecoder * m_pDecoder;
        float m_Volume;
        bool m_bUsesHardwareAcceleration;
        bool m_bEnableSound;
        int m_AudioID;

        GLTexturePtr m_pTextures[4];
};

}

#endif