/*

 Project Carmack 0.01  (AKA Media Library Prototype 01/02)
 Copyright John Ryland, 2005

*/

using namespace std;

#include <iostream>
#include <list>
#include <map>

#include <assert.h>
#include <math.h>
#include <string.h>
#include <pthread.h>
#include <semaphore.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/soundcard.h>
#include <sys/ioctl.h>
#include <sys/stat.h>

#include <avformat.h>
#include <avcodec.h>


/*

Example sources to support:

file:/home/user/Documents/images/jpeg/picture.jpg
file:/home/user/Documents/audio/mpeg/greatestsong.mp3
file:/home/user/Documents/application/playlist/favourites.pls
file:/home/user/Documents/application/playlist/favourites.m3u
http://www.slashdot.org/somefile.mpg
http://www.streaming_radio_server.net:9000
http://www.streaming_tv_server.net:9000
camera
microphone
camera & microphone


Example outputs to support:

File/URL
UDP packets
TCP/IP packets
OSS
Alsa
QSS
Visualiser
QDirectPainter
QPainter
XShm
DirectDraw
YUV acceleration

*/

/*
1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC);
2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC);
2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC);
1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC);
3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC);


enum FormatType {
    FRAME_ID_FILE_PROTO,
    FRAME_ID_HTTP_PROTO,
    FRAME_ID_RTSP_PROTO,
    FRAME_ID_RTP_PROTO,
    FRAME_ID_MMS_PROTO,

    FRAME_ID_GIF_FORMAT,
    FRAME_ID_JPG_FORMAT,
    FRAME_ID_PNG_FORMAT,

    FRAME_ID_MP3_FORMAT,
    FRAME_ID_WAV_FORMAT,
    FRAME_ID_GSM_FORMAT,
    FRAME_ID_AMR_FORMAT,

    FRAME_ID_MPG_FORMAT,
    FRAME_ID_AVI_FORMAT,
    FRAME_ID_MP4_FORMAT,
    FRAME_ID_MOV_FORMAT,

    FRAME_ID_FIRST_PACKET_TYPE,
    FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE,
    FRAME_ID_MPEG1_VIDEO_PACKET,
    FRAME_ID_MPEG2_VIDEO_PACKET,
    FRAME_ID_MPEG4_VIDEO_PACKET,
    FRAME_ID_QT_VIDEO_PACKET,
    FRAME_ID_GSM_AUDIO_PACKET,
    FRAME_ID_AMR_AUDIO_PACKET,
    FRAME_ID_AAC_AUDIO_PACKET,
    FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET,
    
    FRAME_ID_VIDEO_PACKET,
    FRAME_ID_AUDIO_PACKET,

    FRAME_ID_YUV420_VIDEO_FRAME,
    FRAME_ID_YUV422_VIDEO_FRAME,
    FRAME_ID_RGB16_VIDEO_FRAME,
    FRAME_ID_RGB24_VIDEO_FRAME,
    FRAME_ID_RGB32_VIDEO_FRAME,

    FRAME_ID_PCM_AUDIO_DATA,

    FRAME_ID_RENDERED_AUDIO,
    FRAME_ID_RENDERED_VIDEO,

    FRAME_ID_URL_SOURCE,
    FRAME_ID_AUDIO_SOURCE,
    FRAME_ID_VIDEO_SOURCE,

    FRAME_ID_MULTIPLE_FORMAT,
    FRAME_ID_ANY_ONE_OF_FORMAT,

    FRAME_ID_MULTIPLE_PACKET,
    FRAME_ID_ANY_ONE_OF_PACKET,

    FRAME_ID_UNKNOWN
};
*/

// Generic frame payload: a sized byte buffer plus a presentation timestamp.
// NOTE: the original used "typedef struct X { ... };" without a typedef
// name, which is ill-formed in C++; a plain struct declares the same type.
struct FRAME_GENERIC {
/*    
    int generalId;
    int specificId;
    int streamId;
*/    
    int bytes;   // number of valid bytes in 'bits'
    char* bits;  // payload buffer (not owned by this struct)
    int pts;     // presentation timestamp
};

/*
enum videoCodecId {
    FRAME_ID_MPEG1_VIDEO_PACKET,
    FRAME_ID_MPEG2_VIDEO_PACKET,
    FRAME_ID_MPEG4_VIDEO_PACKET,
    FRAME_ID_QT_VIDEO_PACKET
};

typedef struct FRAME_VIDEO_PACKET {
    int codecId;
    int bytes;
    char* bits;
};

enum videoFrameFormat {
    FRAME_ID_YUV420_VIDEO_FRAME,
    FRAME_ID_YUV422_VIDEO_FRAME,
    FRAME_ID_RGB16_VIDEO_FRAME,
    FRAME_ID_RGB24_VIDEO_FRAME,
    FRAME_ID_RGB32_VIDEO_FRAME    
};

typedef struct FRAME_VIDEO_FRAME {
    int format;
    int width;
    int height;
    int bytes;
    char* bits;
};

struct UpPCMPacket {
    int freq;
    int bitsPerSample;
    int size;
    char data[1];
};

struct DownPCMPacket {
    
};

*/

// Fixed pipeline picture dimensions, hard-coded throughout the encoder
// and muxer modules below.
#define WIDTH 160
#define HEIGHT 120

// Utils
// Thread: minimal pthread wrapper.  Subclasses implement execute();
// start() spawns a pthread that runs setup() then execute(arg).
class Thread {
public:
    Thread();
    int start( void* arg );   // returns pthread_create()'s result (0 on success)

protected:
    int run( void* arg );                 // thread body: setup() then execute()
    static void* entryPoint( void* );     // static trampoline for pthread_create
    virtual void setup() { };             // optional pre-execute hook
    virtual void execute( void* ) = 0;    // the thread's actual work
    void* arg() const { return arg_; }
    void setArg( void* a ) { arg_ = a; }
   
private:
    pthread_t tid_;   // thread id filled in by pthread_create
    void* arg_;       // user argument handed to execute()
};

// Default constructor: tid_ and arg_ are populated later by start().
Thread::Thread() {}

// Launch the thread: record the user argument, then create a pthread
// beginning at the static trampoline entryPoint() with 'this' as its
// context.  Returns pthread_create()'s result (0 on success).
int Thread::start( void* arg )
{
    setArg( arg );
    int result = pthread_create( &tid_, 0, Thread::entryPoint, this );
    return result;
}

// Thread body: log the process id, run the virtual setup() hook, then
// the subclass's execute() with the argument passed to start().
// Returns 0 on completion.  (The original fell off the end of a
// non-void function — undefined behavior.)
int Thread::run( void* arg )
{
    printf(" (pid: %i)", getpid() );
    setup();
    execute( arg );
    return 0;
}

// pthread trampoline: recover the Thread object and invoke run().
// Returns 0 as the thread's exit value.  (The original returned
// nothing from a void* function — undefined behavior.)
void* Thread::entryPoint( void* pthis )
{
    Thread* pt = (Thread*)pthis;
    pt->run( pt->arg() );
    return 0;
}



// Format
// Format: thin wrapper around a C-string identifier used to tag frame
// and stream formats.  Comparisons are by string content, not pointer
// identity.  The wrapped pointer is borrowed — callers must keep the
// string alive for the Format's lifetime.
class Format {
public:
    Format() : s( 0 ) { }
    Format(const Format &other) : s( other.s ) { }
    Format(const char *str) : s( str ) { }
    // Content equality; a default-constructed (null) Format equals only
    // another null Format.  (The original passed null to strcmp — UB.)
    bool operator==(const Format& other) const {
        if ( !s || !other.s )
            return s == other.s;
        return !strcmp(other.s, s);
    }
    operator const char *() const {
        return s;
    }
    bool operator==(const char *other) const {
        if ( !s || !other )
            return s == other;
        return !strcmp(s, other);
    }
    // Strict weak ordering for use as a map key; null sorts first.
    // (The original compared arguments in reverse — yielding a
    // descending order — and crashed on null.)
    bool operator<(const Format& other) const {
        if ( !s || !other.s )
            return !s && other.s;
        return strcmp(s, other.s) < 0;
    }
private:
    const char *s;   // borrowed identifier string (may be null)
};



// Frame
// Frame: a reference-counted, format-tagged payload passed between
// modules.  The mutex guards only the counter; the payload itself is
// not synchronized.
class Frame {
public:
    // The original default constructor left the mutex and counter
    // uninitialized — undefined behavior as soon as ref() runs, and
    // SimpleModule::createNewFrame() does exactly "return new Frame;".
    Frame() : counter( 0 ), type(), bits( 0 )
    {
        pthread_mutex_init( &mutex, NULL );
    }
    
    Frame( const char* id, void* data )
        : counter( 0 ), type( id ), bits( data )
    {
        pthread_mutex_init( &mutex, NULL );
    }
    
    // Take one reference (thread-safe).
    void ref() const
    {
        pthread_mutex_lock( &mutex );
        ++counter;
        pthread_mutex_unlock( &mutex );
    }
    
    // Drop one reference (thread-safe).  Frames are not freed here;
    // owners poll refcount() to recycle them (see getAvailableFrame()).
    void deref() const
    {
        pthread_mutex_lock( &mutex );
        --counter;
        pthread_mutex_unlock( &mutex );
    }
    
    // Thread-safe snapshot of the current reference count.
    int refcount() const 
    {
        int ret;
        pthread_mutex_lock( &mutex );
        ret = counter;
        pthread_mutex_unlock( &mutex );
        return ret;
    }
    
    Format id() const { return type; }    // format tag
    void* data() const { return bits; }   // borrowed payload pointer
        
private:
    mutable pthread_mutex_t mutex;  // guards 'counter' only
    mutable int counter;            // reference count
    Format type;                    // format identifier
    void *bits;                     // payload (not owned)
};


// PCMData: a fixed 64KB buffer of decoded PCM samples.
struct PCMData {
    int size;          // valid bytes in 'data'
    char data[65536];  // decoded sample bytes
};


// YUVFrame: a decoded picture plus its geometry and ffmpeg pixel format.
// 'pic' is allocated with avcodec_alloc_frame() by the producing module.
struct YUVFrame {
    int width;
    int height;
    enum PixelFormat fmt;  // ffmpeg pixel format of 'pic'
    AVFrame *pic;          // decoded picture (ffmpeg-owned buffers)
/*
    uchar *y;
    uchar *u;
    uchar *v;
    int scanlineWidth[3];
*/
};

/*
struct StreamPacket {
    void *private; // AVPacket *pkt;
    int streamId;
    int size;
    char *data;
};
*/

/*
struct StreamPacket {
    int streamId;
    Frame frame;
};
*/

class Module;

// Command verbs that can be delivered to a module.
enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom };

// A module's address is currently just its pointer (no remoting yet).
typedef Module *Address;

// A queued request: which module, what to do, and an opaque argument
// (usually a Frame*).
struct Command {
    Address address;
    Commands command;
    const void *arg;
};

// CommandQueue
class CommandQueue {
public:
    CommandQueue( int size );
    
    void add( const Command & );
    const Command &remove();
    
private:
    int max;
    const Command **commands;
    int in, out;
    
    pthread_mutex_t mutex;
    sem_t free;
    sem_t used;
};

// Allocate the ring and initialize the sync primitives: 'free' starts
// at full capacity (all slots empty), 'used' at zero.
CommandQueue::CommandQueue( int size )
    : max( size ), in( 0 ), out( 0 )
{
    commands = new const Command*[max];
    pthread_mutex_init( &mutex, NULL );
    sem_init( &free, 0, max );  // empty-slot count
    sem_init( &used, 0, 0 );    // filled-slot count
}

// Enqueue a command pointer; blocks while the queue is full.  The
// sem_wait loop retries when interrupted by a signal (sem_wait returns
// non-zero on EINTR).  The mutex is held only around the index update
// so producers and consumers interleave safely.
void CommandQueue::add( const Command &command )
{
    while( sem_wait( &free ) != 0 );  // claim an empty slot (EINTR retry)
    pthread_mutex_lock( &mutex );
    
    commands[in] = &command;          // store borrowed pointer
    in = ( in + 1 ) % max;
    
    pthread_mutex_unlock( &mutex );
    sem_post( &used );                // publish one filled slot
}

// Dequeue the oldest command; blocks while the queue is empty.  The
// returned reference is to the producer's Command object, which must
// still be alive at this point.
const Command &CommandQueue::remove()
{
    while( sem_wait( &used ) != 0 );  // wait for a filled slot (EINTR retry)
    pthread_mutex_lock( &mutex );
    
    const Command *command = commands[out];
    out = ( out + 1 ) % max;
    
    pthread_mutex_unlock( &mutex );
    sem_post( &free );                // release one empty slot
    
    return *command;
}


// PipelineManager: builds a chain of modules connecting the requested
// source formats to the requested destination formats, then drives it
// on its own thread.  (Method definitions live elsewhere in the file.)
class PipelineManager : public Thread {
public:
    PipelineManager();
    void addSource( Format frameType );         // register a wanted input format
    void addDestination( Format frameType );    // register a wanted output format
    void clearTargets();
    void connectTogether(Module *m1, Module *m2, const Frame &f);
    void makeConnections(Module *start);
    // NOTE(review): this overload hides Thread::start(void*).
    void start( Frame *frame ) { Thread::start( (void *)frame ); }
    void execute( void *p );
    // Called by SimpleModule::dispatch() when a frame reaches an
    // unconnected end of the pipeline.
    void unconnectedRoute( Module *m, const Frame &f );
private:
    list<Module*> sourceModules;       // instantiated source modules
    list<Module*> destinationModules;  // instantiated sink modules
    list<Format> source;               // requested input formats
    list<Format> destination;          // requested output formats
};


// Global pipeline manager consulted by SimpleModule::dispatch() for
// frames sent to an unconnected pipeline end; remains 0 until created.
PipelineManager *pipelineMgr = 0;



// ModuleFactory: abstract factory describing a module type's formats
// and constraints without instantiating it.
class ModuleFactory {
public:
    ModuleFactory() { }
    // Polymorphic base: a virtual destructor makes delete-through-base
    // well-defined.  (The original omitted it — UB on such deletes.)
    virtual ~ModuleFactory() { }

    virtual const char *name() = 0;
    
    virtual list<Address> threadAffinity() = 0;
    virtual bool isBlocking() = 0;
    virtual Format inputFormat() = 0;
    virtual Format outputFormat() = 0;
    virtual bool supportsInputFormat( Format ) = 0;
    virtual bool supportsOutputFormat( Format ) = 0;
    
    virtual Module *createInstance() = 0;
};



// Modules
// Module: abstract interface for every pipeline element (sources,
// codecs, renderers, sinks).  Instances are linked into chains by the
// pipeline manager and driven via command().
class Module {
public:
    Module() { }
    // Modules are handled through base pointers; without a virtual
    // destructor, deleting through the base is undefined behavior.
    virtual ~Module() { }
    
    virtual const char *name() = 0;
    virtual Format inputFormat() = 0;
    virtual Format outputFormat() = 0;
//    virtual bool constFrameProcessing() = 0;

//    virtual bool supportsInputType( Format ) = 0;
    virtual bool supportsOutputType( Format ) = 0;
    
//    virtual list<int> inputFormats() { list<int> t; t.push_back(FRAME_ID_UNKNOWN); return t; }
//    virtual list<int> outputFormats() { list<int> t; t.push_back(FRAME_ID_UNKNOWN); return t; }

    virtual bool isBlocking() = 0;//{ return false; }
    virtual list<Address> threadAffinity() = 0;

//    virtual void command( Command command, const void *arg, bool priorityFlag ) = 0;
    virtual void command( Commands command, const void *arg ) = 0;
    
    virtual void connectTo( Module *next, const Frame &f ) = 0;
    virtual void connectedFrom( Module *next, const Frame &f ) = 0;
};


// DispatchInterface: abstraction over how a Command reaches a module
// (direct in-process call, queued thread, process boundary, ...).
class DispatchInterface {
public:
    virtual ~DispatchInterface() { }   // polymorphic base: virtual dtor
    virtual void dispatch( Command *command ) = 0;
};


/*
class ModulesThread : public Thread, public DispatchInterface {
public:
    void execute( void* )
    {
        for (;;) {
            CommandStruct *command = buffer.remove();
            command->module->command( command->command, command->arg );
        }
    }
    
    void dispatch( CommandStruct *command )
    {
        buffer.add( command );
    }

private:
    CommandQueue buffer;
};
*/

// ProcessBoundryThing: placeholder dispatcher for commands that must
// cross a process boundary.  Currently a stub — commands are dropped.
class ProcessBoundryThing : public DispatchInterface {
public:
    void dispatch( Command *command )
    {
        // TODO: marshal the command across the process boundary.
    }
};


// ModuleMapper: registry of known modules, the formats they speak, and
// the dispatch indirection used to deliver commands to them.
class ModuleMapper {
public:
    void addModule( Module *module )
    {
        modules.push_back(module);
    }

    void addMapping( Address address, DispatchInterface *dispatcher )
    {
        dispatchAddressMap[address] = dispatcher;
    }

    // Linear scan for the first registered module that consumes
    // 'format'; returns 0 when none matches.
    Module *findModuleWithInputFormat( Format format )
    {
        for ( list<Module *>::iterator it = modules.begin(); it != modules.end(); ++it ) {
            if ( (*it)->inputFormat() == format ) {
                return (*it);
            }
        }
        return 0;
    }

    // Same scan for producers.  The original fell off the end of this
    // non-void function when nothing matched — undefined behavior.
    Module *findModuleWithOutputFormat( Format format )
    {
        for ( list<Module *>::iterator it = modules.begin(); it != modules.end(); ++it ) {
            if ( (*it)->outputFormat() == format ) {
                return (*it);
            }
        }
        return 0;
    }
    
    DispatchInterface *lookup( Address address )
    {
        return dispatchAddressMap[address];
    }

    // Deliver a command directly to the addressed module.  The command
    // lives on the stack for the duration of the call; the original
    // heap-allocated a Command per dispatch and never freed it.
    void dispatchCommand( Address address, Commands command, const void *arg )
    {
        Command cmd;
        cmd.command = command;
        cmd.arg = arg;
        cmd.address = address;
//        lookup( cmd.address )->dispatch( &cmd ); 
        address->command( cmd.command, cmd.arg ); 
    }

private:
    list<Module*> modules;                             // registered modules
    map<Address,DispatchInterface*> dispatchAddressMap;
    multimap<Format,Module*> inputFormatModuleMap;     // reserved, unused so far
    multimap<Format,Module*> outputFormatModuleMap;    // reserved, unused so far
};
    

// Lazily-created global ModuleMapper singleton (allocated once, never
// destroyed).
ModuleMapper *moduleMapper()
{
    static ModuleMapper *instance = 0;
    if ( instance == 0 )
        instance = new ModuleMapper;
    return instance;
}

// Free-function dispatch hook: routes a command through the global
// module mapper, so modules need no direct ModuleMapper dependency.
static void staticDispatch( Address address, Commands command, const void *arg )
{
    moduleMapper()->dispatchCommand( address, command, arg );
}

    
class SimpleModule : public Module {
public:
    SimpleModule() : next( 0 ) { }
    
    bool isBlocking() { return false; }
    list<Address> threadAffinity() { }
    
    bool supportsOutputType(Format type)
    {
        return outputFormat() == type;
    }
    
    virtual void init() = 0;
    
    void command( Commands command, const void *arg )
    {
        switch (command) {
            case Process:
                process( *((Frame *)arg) );
                break;
            case Simulate:
                simulate( *((Frame *)arg) );
                break;
            case Deref:
                ((Frame *)arg)->deref();
                break;
            case Init:
                init();
                break;
        }
    }

    void dispatch( Address address, Commands command, const void *arg )
    {
        if ( address )
            staticDispatch( address, command, arg );
        else if ( pipelineMgr && ( command == Process || command == Simulate ) )
            pipelineMgr->unconnectedRoute( this, *(const Frame *)arg );            
    }

    virtual void derefFrame( Frame *frame )
    {
        dispatch( prev, Deref, frame );
    }
    
    virtual void process( const Frame &frame )
    {
        dispatch( next, Process, &frame );
    }
    
    virtual void simulate( const Frame &frame )
    {
        process( frame );
    }
        
    void connectTo( Address n, const Frame &f )
    { 
        next = n;
    }   
    
    void connectedFrom( Address n, const Frame &f )
    { 
        prev = n;
    }   
    
    Frame *getAvailableFrame()
    {
        Frame *frame;
        list<Frame*>::iterator it;
        for ( it = used.begin(); it != used.end(); ++it ) {
            frame = *it;
            if ( frame->refcount() == 0 ) {
                reuseFrame( frame );
                frame->ref();
                return frame;
            }
        }        
        frame = createNewFrame();
        frame->ref();
        used.push_back( frame );
        return frame;
    }
        
    virtual Frame* createNewFrame()
    { 
        return new Frame;
    }
    
    virtual void reuseFrame( Frame *frame )
    { }

private:
    list<Frame*> used;
    Module *next;
    Module *prev;
};


class RoutingModule : public SimpleModule {
public:
    RoutingModule() { }
    
//    bool supportsOutputType(Format type) { return outputFormat() == type; }
   
    void process( const Frame &frame )
    {
        dispatch( routes[Format(frame.id())], Process, &frame );
    }
    
    void connectTo( Module *next, const Frame &f )
    { 
        setRoute( next->inputFormat(), next );
    }

private:
    void setRoute( Format t, Module* m )
    {
        routes[Format(t)] = m;
    }
    
    map<Format, Module*> routes;
};


// OSSRenderer: plays PCM frames through the OSS /dev/dsp device.
class OSSRenderer : public SimpleModule {
public:
    OSSRenderer() { }
    
    void init();                     // opens and configures /dev/dsp
    void process( const Frame &f );  // writes one PCM buffer to the device

    const char *name() { return "OSS Renderer"; }
    Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; }
    Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; }
    bool isBlocking() { return true; }
    
private:
    int fd;  // /dev/dsp file descriptor
};


// Open and configure /dev/dsp for 16-bit little-endian stereo 44.1kHz
// output.  The original ignored every return value, so a missing or
// busy device went completely unreported.
void OSSRenderer::init()
{
    // Initialize OSS
    fd = open( "/dev/dsp", O_WRONLY );
    if ( fd < 0 ) {
        perror( "OSSRenderer::init: open /dev/dsp" );
        return;  // process() will report the failed write
    }
    
    int format = AFMT_S16_LE;
    if ( ioctl( fd, SNDCTL_DSP_SETFMT, &format ) == -1 )
        perror( "OSSRenderer::init: SNDCTL_DSP_SETFMT" );
    
    int channels = 2;
    if ( ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ) == -1 )
        perror( "OSSRenderer::init: SNDCTL_DSP_CHANNELS" );
    
    int speed = 44100;
    if ( ioctl( fd, SNDCTL_DSP_SPEED, &speed ) == -1 )
        perror( "OSSRenderer::init: SNDCTL_DSP_SPEED" );
}

// Write one PCM buffer to the sound device, then release the frame.
void OSSRenderer::process( const Frame &frame )
{
    // Render PCM to device
    PCMData *pcm = (PCMData*)frame.data();
    // NOTE(review): a short write (write() < pcm->size) is silently
    // dropped here — only an outright failure (-1) is reported.
    if ( write( fd, pcm->data, pcm->size ) == -1 )
        perror( "OSSRenderer::process( Frame )" );
    frame.deref();
}


// Output pixel layouts a renderer can request from VideoScaleContext.
// NOTE(review): configure() currently maps the BGR variants to the
// same ffmpeg format as their RGB counterparts — verify intent.
enum ColorFormat {
    RGB565,
    BGR565,
    RGBA8888,
    BGRA8888
};

// VideoScaleContext: converts and scales decoded pictures to a target
// size and RGB format with ffmpeg's img_convert/img_resample API.
// Per convert() call the pipeline is: input -> YUV420P (outputPic1)
// -> resampled YUV420P (outputPic3) -> output color format
// (outputPic2) -> row-by-row copy into the caller's buffer.
class VideoScaleContext {
public:
    AVPicture outputPic1;  // input converted to YUV420P
    AVPicture outputPic2;  // final picture in the output color format
    AVPicture outputPic3;  // resampled YUV420P picture

    VideoScaleContext() {
        //img_convert_init();
	videoScaleContext2 = 0;
	outputPic1.data[0] = 0;
	outputPic2.data[0] = 0;
	outputPic3.data[0] = 0;
    }

    virtual ~VideoScaleContext() {
	free();
    }

    // Release the resampler and any allocated pictures; safe to call
    // repeatedly because every pointer is nulled after release.
    void free() {
	if ( videoScaleContext2 )
	    img_resample_close(videoScaleContext2);
	videoScaleContext2 = 0;
	if ( outputPic1.data[0] )
	    avpicture_free(&outputPic1);
	outputPic1.data[0] = 0;
	if ( outputPic2.data[0] )
	    avpicture_free(&outputPic2);
	outputPic2.data[0] = 0;
	if ( outputPic3.data[0] )
	    avpicture_free(&outputPic3);
	outputPic3.data[0] = 0;
    }

    // Reset cached geometry so the next configure() rebuilds everything.
    // NOTE(review): the constructor never calls this, so the
    // scaleContext* members hold garbage until configure() runs —
    // confirm all callers configure() before convert().
    void init() {
	scaleContextDepth = -1;
	scaleContextInputWidth = -1;
	scaleContextInputHeight = -1;
	scaleContextPicture1Width = -1;
	scaleContextPicture2Width = -1;
	scaleContextOutputWidth = -1;
	scaleContextOutputHeight = -1;
	scaleContextLineStride = -1;
    }

    // (Re)build the conversion pipeline when the input/output geometry
    // changes.  Returns false when the resampler or any intermediate
    // picture fails to allocate.
    bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) {
	int colorMode = -1;
	// NOTE(review): BGR565/BGRA8888 map to the same PIX_FMT as the
	// RGB variants — channel order is not actually swapped here.
	switch ( outFmt ) {
	    case RGB565:   colorMode = PIX_FMT_RGB565; break;
	    case BGR565:   colorMode = PIX_FMT_RGB565; break;
	    case RGBA8888: colorMode = PIX_FMT_RGBA32; break;
	    case BGRA8888: colorMode = PIX_FMT_RGBA32; break;
	};
	scaleContextFormat = fmt;
	scaleContextDepth = colorMode;
	if ( scaleContextInputWidth != w || scaleContextInputHeight != h
				|| scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) {
	    scaleContextInputWidth = w;
	    scaleContextInputHeight = h;
	    scaleContextOutputWidth = outW;
	    scaleContextOutputHeight = outH;
	    scaleContextLineStride = lineStride;
	    free();
	    if ( !(videoScaleContext2 = img_resample_init(outW, outH, w, h)) )
		return false;
	    if ( avpicture_alloc(&outputPic1, PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 )
		return false;
	    if ( avpicture_alloc(&outputPic2, scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 )
		return false;
	    if ( avpicture_alloc(&outputPic3, PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 )
		return false;
	}
	return true;
    }

    // Convert and scale 'picture', then copy the result row by row into
    // 'output' honoring the caller's line stride.  A no-op unless
    // configure() has succeeded.
    void convert(uint8_t *output, AVFrame *picture) {
	if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] )
	    return;

        // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format
        // first conversion needed because img_resample assumes YUV420P, doesn't seem to
        // behave with packed image formats

        img_convert(&outputPic1, PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight);
        
        img_resample(videoScaleContext2, &outputPic3, &outputPic1);
	
        img_convert(&outputPic2, scaleContextDepth, &outputPic3, PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight);
        
        //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture);
	//img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight);

	// NOTE(review): assumes scaleContextLineStride >= linesize[0];
	// otherwise each memcpy overruns the caller's row — verify callers.
	int offset = 0;
	for ( int i = 0; i < scaleContextOutputHeight; i++ ) {
	    memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] );
	    output += scaleContextLineStride;
	    offset += outputPic2.linesize[0]; 
	}
    }

private:
    ImgReSampleContext *videoScaleContext2;  // ffmpeg resampler (owned)
    int scaleContextDepth;                   // output PIX_FMT
    int scaleContextInputWidth;
    int scaleContextInputHeight;
    int scaleContextPicture1Width;           // unused
    int scaleContextPicture2Width;           // unused
    int scaleContextOutputWidth;
    int scaleContextOutputHeight;
    int scaleContextLineStride;              // caller's output row stride (bytes)
    int scaleContextFormat;                  // input PIX_FMT
};


// FFMpegStreamPacket: frame payload wrapping one ffmpeg AVPacket.
struct FFMpegStreamPacket {
    AVPacket *packet;  // owned; data buffer allocated by createNewFrame()
};


class MpegEncodeModule : public SimpleModule {
public:
    MpegEncodeModule() : videoCodecContext( 0 )
    {           
    }
    
    void init()
    {        
printf("S %i\n", __LINE__);
        av_register_all();
        
        videoCodecContext = avcodec_alloc_context();

        AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO);
        assert( codec );

/*        
        if ( avcodec_open( videoCodecContext,  &mpeg1video_encoder ) < 0 ) {
            printf("error opening context\n");
            videoCodecContext = 0;
        }
*/

/*
        videoCodecContext->bit_rate = 400000;
        videoCodecContext->gop_size = 10;
        videoCodecContext->max_b_frames = 1;
*/
        videoCodecContext->width = WIDTH;
        videoCodecContext->height = HEIGHT;
        videoCodecContext->frame_rate = 25;
        videoCodecContext->frame_rate_base= 1;
        videoCodecContext->pix_fmt=PIX_FMT_YUV420P;
        videoCodecContext->codec_type = CODEC_TYPE_VIDEO;
        videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO;
        
        assert( avcodec_open( videoCodecContext, codec ) >= 0 );
        
printf("S %i\n", __LINE__);
    }

    void process( const Frame &frame ) 
    {
printf("T %i\n", __LINE__);
        YUVFrame *yuvFrame = (YUVFrame*)frame.data();
        AVFrame *picture = yuvFrame->pic;

        if ( !videoCodecContext ) {
            printf("can't process video data without a context\n");
            return;
        }
        
        Frame *f = getAvailableFrame();
        
        FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data();
        AVPacket *packet = ffmpeg->packet;
        
printf("T %i\n", __LINE__);

// 160*120*4 = 76800

        printf(" %i x %i   %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); 
        
        AVFrame tmpPic;
        if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 )
            printf("blah1\n");
        img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt,
                                     yuvFrame->width, yuvFrame->height );
        
        printf(" %i x %i   %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); 
        
        static int64_t pts = 0;
        tmpPic.pts = AV_NOPTS_VALUE;
        pts += 5000;
         
//        int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic );
        packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic );
        
        if ( videoCodecContext->coded_frame ) {
            packet->pts = videoCodecContext->coded_frame->pts;
            if ( videoCodecContext->coded_frame->key_frame )
                packet->flags |= PKT_FLAG_KEY;
        }
        
printf("T %i\n", __LINE__);
        
        cerr << "encoded: " << packet->size << " bytes" << endl;
printf("T %i\n", __LINE__);

        frame.deref();
        
        SimpleModule::process( *f );
    }
    
    Frame* createNewFrame()
    { 
        FFMpegStreamPacket *packet = new FFMpegStreamPacket;
        packet->packet = new AVPacket;
        packet->packet->data = new unsigned char[65536];
        packet->packet->size = 65536;
        packet->packet->pts = AV_NOPTS_VALUE;
        packet->packet->flags = 0;
        return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet );
    }
    
    void reuseFrame( Frame *frame )
    {
        FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data();
        packet->packet->size = 65536;
        packet->packet->pts = AV_NOPTS_VALUE;
        packet->packet->flags = 0;
        //av_free_packet( packet->packet );
        //delete packet->packet;
    }
    
    const char *name() { return "Mpeg Video Encoder"; }
    Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; }
    Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; }
    bool isBlocking() { return true; }

private:
    AVCodecContext *videoCodecContext;
};


class FFMpegMuxModule : public SimpleModule {
public:
    FFMpegMuxModule() : outputFileContext( 0 )
    {           
    }
    
    void init()
    {
printf("A %i\n", __LINE__);
        av_register_all();
        
        outputFileContext = av_alloc_format_context();
        outputFileContext->oformat = guess_format("avi", 0, 0);
        AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 );
        //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 );
printf("A %i\n", __LINE__);

        assert( videoStream );
        assert( outputFileContext->oformat );

        AVCodecContext *video_enc = &videoStream->codec;
        
        AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO);
        assert( codec );
        assert( avcodec_open( video_enc, codec ) >= 0 );
        
        video_enc->codec_type = CODEC_TYPE_VIDEO;
        video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P
//        video_enc->bit_rate = video_bit_rate;
//        video_enc->bit_rate_tolerance = video_bit_rate_tolerance;

        video_enc->frame_rate = 10;//25;//frame_rate; 
        video_enc->frame_rate_base = 1;//frame_rate_base; 
        video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft;
        video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom;

        video_enc->pix_fmt = PIX_FMT_YUV420P;
        
        if( av_set_parameters( outputFileContext, NULL ) < 0 ) {
            cerr << "Invalid output format parameters\n";
         exit(1);
        }
    
printf("A %i\n", __LINE__);
//        strcpy( outputFileContext->comment, "Created With Project Carmack" );
//        strcpy( outputFileContext->filename, "blah.avi" );
        
//        if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) {
        if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) {
            printf( "Couldn't open output file: %s\n", outputFileContext->filename );
            exit( 1 );
        }
printf("A %i\n", __LINE__);

        if ( av_write_header( outputFileContext ) < 0 ) {
            printf( "Could not write header for output file %s\n", outputFileContext->filename );
            exit( 1 );
        }

printf("A %i\n", __LINE__);
    }

    void process( const Frame &frame ) 
    {
printf("B %i\n", __LINE__);
        AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet;
        //av_dup_packet( pkt ); 

        if ( !outputFileContext ) {
            printf("can't process video data without a context\n");
            return;
        }
        
/*
            pkt.stream_index= ost->index;
            pkt.data= audio_out;
            pkt.size= ret;
            if(enc->coded_frame)
                pkt.pts= enc->coded_frame->pts;
            pkt.flags |= PKT_FLAG_KEY;
*/          
printf("B %i\n", __LINE__);
        if ( pkt->data ) {
printf("B %i\n", __LINE__);
            av_interleaved_write_frame(outputFileContext, pkt);
        } else {
            printf( "End of data\n" );
            av_write_trailer(outputFileContext);
            exit( 0 );
        }
printf("B %i\n", __LINE__);
        
        frame.deref();
    }
    
    const char *name() { return "AVI Muxer"; }
    Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; }
    Format outputFormat() { return "FRAME_ID_URL_SINK"; }
    bool isBlocking() { return true; }

private:
    AVFormatContext *outputFileContext;
};

class Splitter : public SimpleModule {
public:
    Splitter()
    {           
    }
  
    void init()
    {    
    }
   
    void process( const Frame &frame )
    {
        list<Module*>::iterator it = routes.begin();
        while( it != routes.end() ) {
            if ( it != routes.begin() )
                frame.ref();
            dispatch( (*it), Process, &frame );
            ++it;
        }
    }
    
    void connectTo( Module *next, const Frame &f )
    { 
        routes.push_back( next );
    }

    const char *name() { return "Splitter"; }
    Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; }
    Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; }
    bool isBlocking() { return true; }

private:
    list<Module*> routes;
};

// MpegDecodeModule: decodes MPEG1 video packets into YUV frames using
// the (2005-era) ffmpeg avcodec API.
class MpegDecodeModule : public SimpleModule {
public:
    MpegDecodeModule() : videoCodecContext( 0 )
    {           
        currentFrame = 0;
    }
    
    void init()
    {        
        av_register_all();
        
        if ( avcodec_open( videoCodecContext = avcodec_alloc_context(),  &mpeg1video_decoder ) < 0 ) {
            printf("error opening context\n");
            videoCodecContext = 0;
        }
    }

    // Feed one packet to the decoder; emit a frame downstream whenever
    // a complete picture becomes available.  Decoding may need several
    // packets per picture, so 'currentFrame' persists across calls.
    // (Unused locals from the original — count, and the ptr/len copies
    // for a commented-out loop — have been removed.)
    void process( const Frame &frame ) 
    {
        AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet;

        if ( !videoCodecContext ) {
            printf("can't process video data without a context\n");
            return;
        }
        
        if ( !currentFrame )
            currentFrame = getAvailableFrame();
        
        YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data();
        AVFrame *picture = yuvFrame->pic;
        
        assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P );
        
        int gotPicture = 0;
        avcodec_decode_video( videoCodecContext, picture, &gotPicture,
                              (unsigned char*)pkt->data, pkt->size );
        frame.deref();

        if ( gotPicture ) {
            yuvFrame->width = videoCodecContext->width;
            yuvFrame->height = videoCodecContext->height;
            yuvFrame->fmt = videoCodecContext->pix_fmt;
            SimpleModule::process( *currentFrame );
            currentFrame = 0;            
        }
    }
    
    Frame* createNewFrame()
    {
        YUVFrame *yuvFrame = new YUVFrame;
        yuvFrame->pic = avcodec_alloc_frame();
        return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame );
    }
    
    // Recycle a pooled frame: drop the old AVFrame and allocate a
    // fresh one so stale decoder references cannot leak through.
    void reuseFrame( Frame *frame )
    {
        YUVFrame *yuvFrame = (YUVFrame *)frame->data();
        av_free( yuvFrame->pic );
        yuvFrame->pic = avcodec_alloc_frame();
    }
    
    const char *name() { return "Mpeg1 Video Decoder"; }
    Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; }
    Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; }
    bool isBlocking() { return true; }

private:
    Frame *currentFrame;                // partially-decoded output frame
    AVCodecContext *videoCodecContext;  // owned decoder context
};


class MP3DecodeModule : public SimpleModule {
public:
    MP3DecodeModule() : audioCodecContext( 0 )
    {
    }
    
    void init()
    {
        av_register_all();
        
        if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) {
            printf("error opening context\n");
            audioCodecContext = 0;
        }
    }
    
    void process( const Frame &frame ) 
    {
        AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet;

        Frame *f = getAvailableFrame();
        PCMData *pcm = (PCMData *)f->data();
        int count = 0, ret = 0, bytesRead;
        AVPacket *mp3 = pkt;
	unsigned char *ptr = (unsigned char*)mp3->data;
        for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) {
            ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len);
            if ( bytesRead > 0 )
                count += bytesRead;
	}
        frame.deref();

        pcm->size = count;
        SimpleModule::process( *f );
    }
    
    Frame* createNewFrame()
    {
        return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData );
    }
    
    const char *name() { return "MP3 Decoder"; }
    Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; }
    Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; }
    bool isBlocking() { return true; }

private:
    AVCodecContext *audioCodecContext;
};


// Reads raw MP3 packets from a URL/file source and emits them as
// FRAME_ID_MPEG_AUDIO_PACKET frames.
class MP3SourceModule : public SimpleModule {
public:
    MP3SourceModule() : avFormatContext( 0 )
    {
    }

    void init()
    {
        av_register_all();
    }
    
    // Open the file named by `frame` and pump packets downstream until EOF.
    void process( const Frame &frame ) {
        printf("file: %s\n", (char*)frame.data());
        if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) {
            printf("error opening file"); 
            return;  // was: fell through into the read loop after a failed open
        }
    
        while( avFormatContext ) {
            if ( av_read_packet(avFormatContext, &pkt) < 0 ) {
                printf("error reading packet\n");
                break;  // EOF or read error: was an infinite error-printing loop
            } else {
                SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) );
            }
        }
    }

    const char *name() { return "MP3 Reader"; }
    Format inputFormat() { return "FRAME_ID_URL_SOURCE"; }
    Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; }
    bool isBlocking() { return true; }

private:
    AVPacket pkt;                       // reused for every packet read
    AVFormatContext *avFormatContext;
};


void ProcessMessages();


class FFMpegSourceModule : public SimpleModule {
public:
    FFMpegSourceModule() : avFormatContext( 0 )
    {
    }

    bool supportsOutputType( Format type )
    {
        return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET";
    }
    
    const char* name() { return "FFMpeg Demuxer Source"; }
    Format inputFormat() { return "FRAME_ID_URL_SOURCE"; }
    Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; }
    bool isBlocking() { return true; }
    list<Module*> threadAffinity() { }

    void init()
    {
	av_register_all();
    }
    
    void process( const Frame &frame )
    {
        printf("file: %s\n", (char*)frame.data());
        
        // Open file
        if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) {
            printf("error opening file");
            return;
        }
        
        frame.deref();
        
        // Gather stream information
        if ( av_find_stream_info(avFormatContext) < 0 ) {
            printf("error getting stream info\n");
            return;
        }
   
        while( avFormatContext ) {
            AVPacket *pkt = new AVPacket;
//            if ( av_read_packet(avFormatContext, pkt) < 0 ) {
            if ( av_read_frame(avFormatContext, pkt) < 0 ) {
                printf("error reading packet\n");
                av_free_packet( pkt );
                delete pkt;
                exit( 0 ); // EOF ?
            } else {
                AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec;
                Frame *f = getAvailableFrame( context->codec_type );                
                if ( !f )
                    continue;
                FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data();
                packet->packet = pkt;
                //av_dup_packet( pkt );

		ProcessMessages();
                
                dispatch( routes[pkt->stream_index], Process, f );
            }
        }
        exit( 0 );
    }

    Frame *getAvailableFrame( int type )
    {       
        Frame *frame;
        list<Frame*>::iterator it;
        for ( it = used[type].begin(); it != used[type].end(); ++it ) {
            frame = *it;
            if ( frame->refcount() == 0 ) {
                reuseFrame( frame );
                frame->ref();
                return frame;
            }
        }
        
        // Create new frame
        frame = createNewFrame( type );
        if ( frame ) {
            frame->ref();
            used[type].push_back( frame );
        }
        return frame;
    }
    
    Frame* createNewFrame( int type )
    { 
        FFMpegStreamPacket *packet = new FFMpegStreamPacket;
        switch( type ) {
            case CODEC_TYPE_AUDIO:
                return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet );
            case CODEC_TYPE_VIDEO:
                return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet );
        }
        return 0;
    }
    
    void reuseFrame( Frame *frame )
    {
        FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data();
        av_free_packet( packet->packet );
        delete packet->packet;
    }

    void connectTo( Module *next, const Frame &f )
    {
        routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next;
    }

private:    
    AVFormatContext *avFormatContext;
    map<int,list<Frame*> > used;    
    map<int, Module*> routes;
};


/*
class VideoCameraSourceModule : public SimpleModule {
public:
    VideoCameraSourceModule()
    {
    }

    void init()
    {
        av_register_all();
    }
    
    void process( const Frame &frame ) {
        AVFormatContext *avFormatContext = 0;
        AVFormatParameters vp1, *vp = &vp1;
        AVInputFormat *fmt1;
        memset(vp, 0, sizeof(*vp));
        fmt1 = av_find_input_format("video4linux");//video_grab_format);
        vp->device = 0;//"/dev/video";//video_device;
        vp->channel = 0;//video_channel;
        vp->standard = "pal";//"ntsc";//video_standard;
        vp->width = WIDTH;
        vp->height = HEIGHT;
        vp->frame_rate = 50;
        vp->frame_rate_base = 1;
        if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) {
            printf("Could not find video grab device\n");
            exit(1);
        }
        if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) {
            printf("Could not find video grab parameters\n");
            exit(1);
        }
        // Gather stream information
        if ( av_find_stream_info(avFormatContext) < 0 ) {
            printf("error getting stream info\n");
            return;
        }
        
//        AVCodecContext *videoCodecContext = avcodec_alloc_context();
        AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec;
        AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id);
        
        if ( !codec ) {
            printf("error finding decoder\n");
            return;
        }
        
        printf("found decoder: %s\n", codec->name);
        
        avFormatContext->streams[0]->r_frame_rate      = vp->frame_rate;
        avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base;
        
        videoCodecContext->width = vp->width;
        videoCodecContext->height = vp->height;

//        if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) {
        if ( avcodec_open( videoCodecContext, codec ) < 0 ) {  // is rawvideo_decoder
            printf("error opening context\n");
            videoCodecContext = 0;
        }
        
        if ( !videoCodecContext ) {
            printf("can't process video data without a context\n");
            return;
        }
    
        AVPacket pkt;
        while( avFormatContext ) {
            if ( av_read_frame(avFormatContext, &pkt) < 0 )
                printf("error reading packet\n");
            else {            
                AVFrame *picture = avcodec_alloc_frame();
                YUVFrame *yuvFrame = new YUVFrame;
                yuvFrame->pic = picture;
                Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame );
                currentFrame->ref();
                
                int gotPicture = 0;
                avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size );

                if ( gotPicture ) {
                    yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422
                    yuvFrame->width = videoCodecContext->width;
                    yuvFrame->height = videoCodecContext->height;
//                    printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height );
                    SimpleModule::process( *currentFrame );
                }            
            }
        }
    }

    const char *name() { return "Video Camera Source"; }
    Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; }
    Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; }
    bool isBlocking() { return true; }
};
*/

/*
class Consumer : public RoutingModule {
public:
    Consumer( CommandQueue* b, Format format )
        : RoutingModule(), buffer( b ), formatId( format )
    { }
    
    void init()
    {
    }
    
    void start()
    {
        for (;;) {
            const Command &command = buffer->remove();
            RoutingModule::command( command.command, command.arg );
        }
    }
    
    const char* name() { return "Consumer"; }
    Format inputFormat() { return formatId; }
    Format outputFormat() { return formatId; }

private:
    CommandQueue *buffer;
    Format formatId;
};

class ConsumerThread : public Thread {
public:
    ConsumerThread( Consumer *c )
        : consumer( c )
    { }
    
    void execute( void* )
    {
        consumer->start();
    }
    
private:
    Consumer *consumer;
};


class ThreadBoundryModule : public RoutingModule {
public:
    ThreadBoundryModule( int size, Format format )
        : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ),
            consumerThread( &consumer ), formatId( format )
    {
    }

    void init()
    {
    }

    void connectTo( Module *m, const Frame &f )
    {
        consumer.connectTo( m, f );
        consumerThread.start(0);
    }

    void process( const Frame &frame )
    {
        readCommandQueue.add( frame );
    }
    
    const char *name() { return "Thread Boundry Module"; }
    Format inputFormat() { return formatId; }
    Format outputFormat() { return formatId; }
    
private:
    CommandQueue readCommandQueue;
    Consumer consumer;
    ConsumerThread consumerThread;
    Format formatId;
};
*/

// PipelineManager has no state to initialise beyond its (default-constructed)
// source/destination lists.
PipelineManager::PipelineManager()
{
}

/*
void PipelineManager::newModule( Module *m )
{
    printf("adding module: %s\n", m->name() );

    allModules.push_front( m );

    // update source modules list
    for ( list<Format>::iterator it = source.begin(); it != source.end(); ++it ) {
        if ( (*it) == m->inputFormat() ) {
            sourceModules.push_front( m );
            // Just add it once
            break;
        }
    }
    
    // update destination modules list
    for ( list<Format>::iterator it = destination.begin(); it != destination.end(); ++it ) {
        if ( (*it) == m->outputFormat() ) {
            destinationModules.push_front( m );
            // Just add it once
            break;
        }
    }
}
*/

// Register a source endpoint: find a module that consumes `frameType` and
// remember both the format and the module for pipeline startup.
void PipelineManager::addSource( Format frameType )
{
    Module *module = moduleMapper()->findModuleWithInputFormat( frameType );    
    if ( !module ) {
        printf("No source for %s found!!!\n", (const char *)frameType );
        return;
    }
    printf("adding source: %s\n", (const char *)frameType );
    source.push_front( frameType );
    sourceModules.push_front( module );
}

// Register a destination endpoint: find a module that produces `frameType`
// and remember both the format and the module.
void PipelineManager::addDestination( Format frameType )
{
    Module *module = moduleMapper()->findModuleWithOutputFormat( frameType );    
    if ( !module ) {
        printf("No destination for %s found!!!\n", (const char *)frameType );
        return;
    }
    printf("adding destination: %s\n", (const char *)frameType );
    destination.push_front( frameType );
    destinationModules.push_front( module );
}

// Forget every registered endpoint; the pipeline must be re-targeted with
// addSource/addDestination before it can be started again.
void PipelineManager::clearTargets()
{
    source.clear();
    destination.clear();
    sourceModules.clear();
    destinationModules.clear();
}
    
// Intended to link m1's output to m2's input, inserting a thread-boundary
// module when either side is blocking. The implementation is currently
// disabled (ThreadBoundryModule is commented out above), so this is a no-op.
void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f )
{
/*
    //printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() );
    printf(" -> %s", m2->name() );
    
    staticDispatch( m2, Init, 0 );

    if ( m2->isBlocking() || m1->isBlocking() ) {
        ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() );
        threadModule->init();
        m1->connectTo( threadModule, f );
        threadModule->connectTo( m2, f );
    } else {
        m1->connectTo( m2, f );
    }
*/
}

/*
    Connects together module with a module that can process the frame
    and then gets the module to process this first frame
*/
/*
    Connects `m` to a module able to process frame `f` (looked up by the
    frame's format id) and immediately hands that module the frame.
*/
void PipelineManager::unconnectedRoute( Module *m, const Frame &f )
{
    Module *target = moduleMapper()->findModuleWithInputFormat( f.id() );
    if ( !target ) {    
        printf("Didn't find route for %s\n", m->name());
        return;
    }
    //connectTogether( m, target, f );
    printf("Connecting together: %s -> %s\n", m->name(), target->name() );
    staticDispatch( target, Init, 0 );
    m->connectTo( target, f );
    target->connectedFrom( m, f );
    staticDispatch( target, Process, &f );
}

// Intended to walk from `start` toward the first registered destination
// format, chaining compatible modules via connectTogether. The whole
// implementation is currently disabled, so this is a no-op (execute()
// dispatches directly to source modules instead).
void PipelineManager::makeConnections( Module *start )
{
/*
    printf("making connections:\n");

    Frame frame( "UNKNOWN", 0 );
    Module *currentModule = start;
    Format dstFmt = destination.front();
    
    dispatch( currentModule, Init, 0 );
    printf("  %s (pid: %i)", currentModule->name(), getpid() );
    
    while ( currentModule->outputFormat() != dstFmt ) {       
        Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() );
        if ( m ) {
            connectTogether( currentModule, m, frame );
            currentModule = m;
        } else {
            break;
        }
    }    
    printf("\n");
*/
}


void PipelineManager::execute( void *d )
{
    printf("starting...\n");
    for ( list<Module *>::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) {
        //makeConnections( (*it) );
        staticDispatch( (*it), Init, 0 );
        staticDispatch( (*it), Process, d );
    }
}









#include <windows.h>
#include <ddraw.h>

#include <stdio.h>
#include <stdlib.h>

// Selects which rendering backend MyShowDoubleBuffer/MyCreateWindow use.
enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW };

// Generic Global Variables
HWND                 MainWnd_hWnd;   // main output window
HINSTANCE	     g_hInstance;
HDC                  hdc;            // device context of the main window
HPALETTE             oldhpal;        // palette selected into hdc before ours
RECT                 r;              // blit source rect: (0,0)-(width,height)

// DirectDraw specific Variables
LPDIRECTDRAW         lpDD = NULL;
LPDIRECTDRAWSURFACE  lpDDSPrimary = NULL;  // DirectDraw primary surface
LPDIRECTDRAWSURFACE  lpDDSOne = NULL;      // Offscreen surface #1
DDSURFACEDESC        ddsd;

// Standard Windows API specific Variables
HDC                  hdcMemory;      // memory DC holding hbmpMyBitmap for BitBlt
HBITMAP              hbmpMyBitmap, hbmpOld;

// User decided variables
int                  _method__;   // API or DirectDraw
int                  _do_full_;   // Full screen
int                  _do_flip_;   // Page flipping
int                  _double__;   // Double window size
int                  _on_top__;   // Always on top
int                  _rate____;   // Calculate frame rate

// Interface Variables
unsigned char       *DoubleBuffer;   // pixel buffer the renderer draws into

// Resolution Variables
int                  width;
int                  height;
int                  bytes_per_pixel;


// Macro wrapper so call sites automatically report their file and line.
#define fatal_error(message)   _fatal_error(message, __FILE__, __LINE__)
void _fatal_error(char *message, char *file, int line);

// Fatal error handler (use the fatal_error macro so file/line are filled in).
// Prints the message, pops a message box, then terminates the process.
void _fatal_error(char *message, char *file, int line)
{
	char error_message[1024];
	// snprintf bounds the write; sprintf could overflow the fixed buffer
	// for long messages/paths.
	snprintf(error_message, sizeof(error_message), "%s, in %s at line %d", message, file, line);
	puts(error_message);
	MessageBox(NULL, error_message, "Fatal Error!", MB_OK);
	exit(EXIT_FAILURE);
}


// Placeholder for a future plain-GDI window abstraction (currently unused).
class MSWindowsWindow {
};


// Placeholder for a future DirectDraw window abstraction (currently unused).
class DirectDrawWindow {
};


// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery)
void MyShowDoubleBuffer(void)
{
	if (_method__ == USE_DIRECT_DRAW) {

		if (_do_flip_) {
			// Page flipped DirectDraw
			if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK)
				fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)");
			DoubleBuffer = (unsigned char *)ddsd.lpSurface;
			if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK)
				fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)");

			if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) {
				IDirectDrawSurface_Restore(lpDDSPrimary);
				IDirectDrawSurface_Restore(lpDDSOne);
			}

		} else {
			// Non Page flipped DirectDraw
			POINT pt;
			HDC   hdcx;
			ShowCursor(0);

			if (_do_full_) {
				if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST)
					IDirectDrawSurface_Restore(lpDDSPrimary),
						IDirectDrawSurface_Restore(lpDDSOne);
			} else {
				GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt);
				ReleaseDC(MainWnd_hWnd, hdcx);
				IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY);
			}

			ShowCursor(1);
		}
	} else {
		// Using Windows API
		// BltBlt from memory to screen using standard windows API
		SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer);
		if (_double__)
			StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY);
		else
			BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY);
	}
}

int done = 0;   // set once MyCloseWindow has run; guards against double teardown

// Shut down application
void MyCloseWindow(void)
{
	if (done == 0)
	{
		done = 1;

		if (_method__ == USE_DIRECT_DRAW) {
			ShowCursor(1);
			if(lpDD != NULL) {
				if(lpDDSPrimary != NULL)
					IDirectDrawSurface_Release(lpDDSPrimary);
				if (!_do_flip_)
					if(lpDDSOne != NULL)
						IDirectDrawSurface_Release(lpDDSOne);
				IDirectDrawSurface_Release(lpDD);
			}
			lpDD = NULL;
			lpDDSOne = NULL;
			lpDDSPrimary = NULL;
		} else {
			/* release buffer */
			free(DoubleBuffer);
			// Release interfaces to BitBlt functionality
			SelectObject(hdcMemory, hbmpOld);
			DeleteDC(hdcMemory);
		}
		ReleaseDC(MainWnd_hWnd, hdc);
		PostQuitMessage(0);

	}
}

// Standard blocking Windows message loop: pumps until GetMessage returns 0
// (WM_QUIT), then exits the process with the quit code from PostQuitMessage.
void MyDoMessageLoop(void)
{
	MSG msg;
	while(GetMessage(&msg, NULL, 0, 0 ))
	{
		TranslateMessage(&msg);
		DispatchMessage(&msg);
	}
	exit(msg.wParam);
}


void ProcessMessages()
{
	MSG msg;
	while (PeekMessage(&msg, NULL, 0, 0, 1 ))
	{
		TranslateMessage(&msg);
		DispatchMessage(&msg);
	}
}



// Window procedure: on WM_SIZE, records the new client size into the global
// width/height that the renderer reads; everything else goes to DefWindowProc.
LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam)
{
	if ( iMessage == WM_SIZE ) {
		width = lParam & 0xFFFF;          // low word of lParam = client width
		// NOTE(review): high word is the client height for WM_SIZE; the +4
		// looks like a hand-tuned fudge (stride/border?) — confirm intent.
		height = (lParam >> 16) + 4;
		printf("resize: %i x %i   (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16);
	}
	return DefWindowProc(hWnd, iMessage, wParam, lParam);
}



// Setup the application: register the window class, create the output
// window, then initialise the chosen backend — either DirectDraw (full
// screen or windowed, optionally page-flipped) or a plain GDI memory
// DC + bitmap used for BitBlt. Populates the rendering globals
// (DoubleBuffer, hdc, lpDD*, hdcMemory, hbmp*, bytes_per_pixel).
void MyCreateWindow()
{
	DDSCAPS       ddscaps;
	WNDCLASS      wndclass; // Structure used to register Windows class.
	HINSTANCE     hInstance = 0;//g_hInstance;

	wndclass.style         = 0;
	wndclass.lpfnWndProc   = WndProc;//DefWindowProc;
	wndclass.cbClsExtra    = 0;
	wndclass.cbWndExtra    = 0;
	wndclass.hInstance     = hInstance;
	wndclass.hIcon         = LoadIcon(hInstance, "3D-MAGIC");
	wndclass.hCursor       = LoadCursor(NULL, IDC_ARROW);
	wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH);
	wndclass.lpszMenuName  = NULL;
	wndclass.lpszClassName = "DDraw Renderer Module";

	if (!RegisterClass(&wndclass))
		fatal_error("Error Registering Window");

	if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player",
					WS_OVERLAPPEDWINDOW | WS_VISIBLE,  /* Window style.     */
					CW_USEDEFAULT, CW_USEDEFAULT,      /* Default position. */

					// take into account window border, and create a larger
					// window if stretching to double the window size.
					(_double__) ? 2*width + 10 : width + 10,
					(_double__) ? 2*height + 30 : height + 30,
					NULL, NULL, hInstance, NULL)))
		fatal_error("Error Creating Window");

	hdc = GetDC(MainWnd_hWnd);

	// Source rectangle used by the blit paths in MyShowDoubleBuffer.
	r.left   = 0;
	r.top    = 0;
	r.right  = width;
	r.bottom = height;

	if (_method__ == USE_DIRECT_DRAW)
	{
		if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK)
			fatal_error("Error initialising DirectDraw (DDCreate)");

		if (_do_full_)
		{
			// Full screen: take exclusive mode and switch the display mode.
			if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK)
				fatal_error("Error initialising DirectDraw (DDSetCoopLevel)");
			if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK)
				fatal_error("Error initialising DirectDraw (DDSetDisplayMode)");
		}
		else
		{
			if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK)
				fatal_error("Error initialising DirectDraw (DDSetCoopLevel)");

			// Windowed mode cannot page flip; adopt the desktop pixel depth.
			_do_flip_ = 0;
			bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3;
		}

		if (_do_flip_)
		{
			// Flipping chain: primary surface with one attached back buffer.
			ddsd.dwSize = sizeof(ddsd);
			ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT;
			ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX;
			ddsd.dwBackBufferCount = 1;
			if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK)
				fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)");

			// Get the pointer to the back buffer
			ddscaps.dwCaps = DDSCAPS_BACKBUFFER;
			if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK)
				fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)");
		}
		else
		{
			// Blitting: a primary surface plus a separate off-screen surface.
			ddsd.dwSize = sizeof(ddsd);
			ddsd.dwFlags=DDSD_CAPS;
			ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE;
			if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK)
				fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)");

			ddsd.dwSize=sizeof(ddsd);
			ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH;
			ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN;
			ddsd.dwWidth=width;
			ddsd.dwHeight=height;
			if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK)
				fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)");

			if (lpDDSOne == NULL)
				fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)");
		}

		// Get pointer to buffer surface
		if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK)
			fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)");
		DoubleBuffer = (unsigned char *)ddsd.lpSurface;
		if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK)
			fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)");

		if (_do_flip_)
			ShowCursor(0);
	}
	else /* Windows API */
	{
		bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3;

		DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel);
		if (DoubleBuffer == NULL)
			fatal_error("Unable to allocate enough main memory for an offscreen Buffer");

		// Initialise interface to BitBlt function
		hdcMemory = CreateCompatibleDC(hdc);
		hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height);
		hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap);

		// Build and select a 16 + 3*64 entry palette (64 levels each of
		// red, green, blue after the 16 system entries) for 8-bit modes.
		{
			HPALETTE hpal;
			PALETTEENTRY mypal[64*3+16];
			int i;
			LOGPALETTE *plgpl;

			plgpl = (LOGPALETTE*) LocalAlloc(LPTR,
					sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY));

			plgpl->palNumEntries = 64*3+16;
			plgpl->palVersion = 0x300;

			for (i = 16; i < 64+16; i++)
			{
				plgpl->palPalEntry[i].peRed    = mypal[i].peRed    = LOBYTE(i << 2);
				plgpl->palPalEntry[i].peGreen  = mypal[i].peGreen  = 0;
				plgpl->palPalEntry[i].peBlue   = mypal[i].peBlue   = 0;
				plgpl->palPalEntry[i].peFlags  = mypal[i].peFlags  = PC_RESERVED;

				plgpl->palPalEntry[i+64].peRed    = mypal[i+64].peRed    = 0;
				plgpl->palPalEntry[i+64].peGreen  = mypal[i+64].peGreen  = LOBYTE(i << 2);
				plgpl->palPalEntry[i+64].peBlue   = mypal[i+64].peBlue   = 0;
				plgpl->palPalEntry[i+64].peFlags  = mypal[i+64].peFlags  = PC_RESERVED;

				plgpl->palPalEntry[i+128].peRed    = mypal[i+128].peRed    = 0;
				plgpl->palPalEntry[i+128].peGreen  = mypal[i+128].peGreen  = 0;
				plgpl->palPalEntry[i+128].peBlue   = mypal[i+128].peBlue   = LOBYTE(i << 2);
				plgpl->palPalEntry[i+128].peFlags  = mypal[i+128].peFlags  = PC_RESERVED;
			}

			hpal = CreatePalette(plgpl);
			oldhpal = SelectPalette(hdc, hpal, FALSE);

			RealizePalette(hdc);

		}

	}
}



class DirectDrawRenderer : public SimpleModule {
	public:
		DirectDrawRenderer() {
			width = 320 + 32;
			height = 240;
			_method__ = 0;   // API or DirectDraw
			_do_full_ = 0;   // Full screen
			_do_flip_ = 0;   // Page flipping
			_double__ = 0;   // Double window size
			_on_top__ = 0;   // Always on top
			_rate____ = 0;   // Calculate frame rate
		}
		void init() {
			MyCreateWindow();
		}
		void process( const Frame &f ) {
			const Frame *frame = &f;
			if ( frame && frame->refcount() ) {


//printf("width: %i height: %i\n", width, height);


		free(DoubleBuffer);
		SelectObject(hdcMemory, hbmpOld);
		DeleteDC((HDC)hbmpMyBitmap);
		//DeleteDC(hdcMemory);
		
		DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel);
		if (DoubleBuffer == NULL)
			fatal_error("Unable to allocate enough main memory for an offscreen Buffer");
		
		// Initialise interface to BitBlt function
		hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height);
		hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap);


				YUVFrame *picture = (YUVFrame *)frame->data();
				if (!videoScaleContext.configure(picture->width, picture->height, width, height,
							picture->pic, width * 4, picture->fmt, RGBA8888))
					return;
				videoScaleContext.convert(DoubleBuffer, picture->pic);
				MyShowDoubleBuffer();
				frame->deref();
			}
		}
		const char *name() { return "YUV Renderer"; }
		Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; }
		Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; }
		bool isBlocking() { return true; }
	private:
		VideoScaleContext videoScaleContext;
};








Module *a, *b, *c, *d;   // handles kept for the manual wiring experiment in main()


// Instantiate and register every enabled pipeline module with the module
// mapper. Commented-out entries are modules currently disabled in the build;
// the a/b/c/d globals keep handles for the manual wiring block in main().
void registerModules()
{
    moduleMapper()->addModule( new OSSRenderer );
//    moduleMapper()->addModule( d = new YUVRenderer );
    moduleMapper()->addModule( d = new DirectDrawRenderer );
    moduleMapper()->addModule( new MP3DecodeModule );
//    moduleMapper()->addModule( new FFMpegMuxModule );
    moduleMapper()->addModule( new MpegDecodeModule );
//    moduleMapper()->addModule( new MP3SourceModule );
//    moduleMapper()->addModule( new StreamDemuxModule );
    moduleMapper()->addModule( c = new MpegEncodeModule );
//    moduleMapper()->addModule( b = new Splitter );
    moduleMapper()->addModule( new FFMpegSourceModule );
//    moduleMapper()->addModule( a = new VideoCameraSourceModule );
}

// Play a media file: wire a URL source to the audio and video renderers,
// then run the pipeline with the filename as the seed frame.
void playFile( const char *filename )
{
    pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" );
    pipelineMgr->addDestination( "FRAME_ID_RENDERED_AUDIO" );
    pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" );

    // Give the frame its own heap copy of the filename (it may outlive us).
    size_t nameBytes = strlen(filename) + 1;
    char *nameCopy = new char[nameBytes];
    memcpy(nameCopy, filename, nameBytes);

    Frame file( "FRAME_ID_URL_SOURCE", nameCopy );
    file.ref();
    
    //pipelineMgr->start( &file );
    pipelineMgr->execute( &file );
}


// Wire a live camera source to the video renderer and start the pipeline.
// NOTE(review): VideoCameraSourceModule is commented out in registerModules(),
// so addSource will currently report that no source was found.
void displayCamera()
{
    pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" );
    pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" );
    pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) );
}

// Re-encode a media file: route a URL source through the pipeline to a
// URL sink, seeding it with the filename.
void reEncodeFile( const char *filename )
{
    pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" );
    pipelineMgr->addDestination( "FRAME_ID_URL_SINK" );

    // Heap-duplicate the filename so the frame owns its own copy.
    size_t nameBytes = strlen(filename) + 1;
    char *nameCopy = new char[nameBytes];
    memcpy(nameCopy, filename, nameBytes);

    Frame file( "FRAME_ID_URL_SOURCE", nameCopy );
    file.ref();
    
    pipelineMgr->start( &file );
}

// Record camera video to file while previewing it on screen.
// NOTE(review): VideoCameraSourceModule is commented out in registerModules(),
// so addSource will currently report that no source was found.
void recordVideo()
{
    pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" );
    pipelineMgr->addDestination( "FRAME_ID_URL_SINK" );
    pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" );
    pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) );
}

// Entry point: register all modules, create the pipeline manager and play
// the file named on the command line (default: test.mpg).
int main( int argc, char** argv )
{
    registerModules();
    pipelineMgr = new PipelineManager;
/*    
    Frame f;
    printf("Connecting together: %s -> %s\n", a->name(), b->name() );
    staticDispatch( b, Init, 0 );
    a->connectTo( b, f );
//    b->connectedFrom( a, f );
    
    printf("Connecting together: %s -> %s\n", b->name(), c->name() );
    staticDispatch( c, Init, 0 );
    b->connectTo( c, f );
    
    printf("Connecting together: %s -> %s\n", b->name(), d->name() );
    staticDispatch( d, Init, 0 );
    b->connectTo( d, f );
*/
    const char *mediaFile = (argc > 1) ? argv[1] : "test.mpg";
    playFile( mediaFile );
    //reEncodeFile( (argc > 1) ? argv[1] : "test.mpg" );
    //displayCamera();
    //recordVideo();
    return 0;
}

