diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": 
"clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null 
+++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg 
b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, 
Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit 
b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address 
)->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case 
BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, 
+ uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + 
sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + 
IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. 
*/ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if 
(IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() 
{ return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg 
-I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + 
return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || 
scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i 
< scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating 
&doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release 
interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": 
"/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module 
*findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + 
g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() 
+{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth 
!= w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); 
+ + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// 
Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release 
buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 
--- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp 
new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + 
"browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( 
(*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width 
= -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, 
scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char 
*DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if 
(_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git 
a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, 
cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case 
RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + 
//img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", 
message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if 
(!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. 
*/ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating 
a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = 
mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + 
Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); 
+ +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" 
|| type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg 
-I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + 
return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || 
scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i 
< scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating 
&doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release 
interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg 
-I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + 
return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || 
scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i 
< scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating 
&doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release 
interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + 
"compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); 
++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + 
scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, 
scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables 
+unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); 
+ if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + 
"cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + 
virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile 
new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map 
dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: 
colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, 
(AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + 
MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + 
IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. 
*/ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating 
a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = 
mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + 
Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); 
+ +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" 
|| type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp 
-I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static 
ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || 
scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int 
offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt 
Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + 
free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": 
"c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command 
) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file 
mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp 
b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null 
+++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + 
cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) 
{ + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const 
srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void 
_fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == 
USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + 
"${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + 
modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat 
-lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + 
"${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + 
return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + 
avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because 
img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full 
screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + 
ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ 
-I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper 
*staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || 
scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int 
offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt 
Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + 
free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit 
b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address 
)->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case 
BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, 
+ uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + 
sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + 
IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. 
*/ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if 
(IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() 
{ return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( 
outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return 
type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
// PCMData: fixed-capacity raw PCM audio buffer passed between audio modules.
struct PCMData
{
    int size;            // number of valid bytes in data[]
    char data[65536];    // interleaved PCM samples
};

// Thread: minimal pthread wrapper.  Subclasses implement execute(); start()
// spawns a new thread which calls setup() then execute(arg).
class Thread {
public:
    Thread();
    // Launch the thread; returns the pthread_create() result (0 on success).
    int start( void* arg );

protected:
    int run( void* arg );
    static void* entryPoint( void* );
    virtual void setup() { }
    virtual void execute( void* ) = 0;   // thread body, supplied by subclass
    void* arg() const { return arg_; }
    void setArg( void* a ) { arg_ = a; }

private:
    pthread_t tid_;   // id of the spawned thread
    void* arg_;       // opaque argument forwarded to execute()
};

Thread::Thread() : arg_( 0 ) {}

int Thread::start( void* arg )
{
    setArg( arg );
    return pthread_create( &tid_, 0, Thread::entryPoint, this );
}

int Thread::run( void* arg )
{
    printf(" (pid: %i)", getpid() );
    setup();
    execute( arg );
    return 0;   // BUG FIX: original fell off the end of a non-void function (UB)
}

void* Thread::entryPoint( void* pthis )
{
    Thread* pt = (Thread*)pthis;
    pt->run( pt->arg() );
    return 0;   // BUG FIX: a pthread start routine must return a value
}
"path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) 
+ { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/.vscode/c_cpp_properties.json 
b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure 
&& make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ 
-d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + 
"path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() 
== format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + 
scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, 
scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char 
*DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if 
(_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + 
"version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { 
+ return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + 
scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + 
img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define 
fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + 
BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/.vscode/c_cpp_properties.json 
b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure 
&& make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + 
"${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) 
{ + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width 
= -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + 
img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution 
Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 
2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/.vscode/tasks.json b/research/string-tables/.vscode/tasks.json new file mode 100644 index 0000000..64a18e6 --- /dev/null +++ b/research/string-tables/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Build C++ project", + "type": "shell", + "group": "build", + "command": "cd ./build && make", + "problemMatcher": [] + }, + { + "label": "Build & run C++ project", + "type": "shell", + "group": { + "kind": "build", + "isDefault": true + }, + "command": "cd ./build && make && ./StringsTableTest", + "problemMatcher": [] + }, + { + "label": "Build CMake", + "type": "shell", + "group": "build", + "command": "cd ./build && cmake build .." 
+ }, + { + "label": "Compile Markdown", + "type": "shell", + "args": [], + "command": "${command:extension.markdown-pdf: Export (PDF)}", + + "command2": "markdown-it README.md -o README.html", + "presentation": { + "reveal": "never", + "panel": "shared", + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp 
-I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static 
ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || 
scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int 
offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt 
Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + 
free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/.vscode/tasks.json b/research/string-tables/.vscode/tasks.json new file mode 100644 index 0000000..64a18e6 --- /dev/null +++ b/research/string-tables/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Build C++ project", + "type": "shell", + "group": "build", + "command": "cd ./build && make", + "problemMatcher": [] + }, + { + "label": "Build & run C++ project", + "type": "shell", + "group": { + "kind": "build", + "isDefault": true + }, + "command": "cd ./build && make && ./StringsTableTest", + "problemMatcher": [] + }, + { + "label": "Build CMake", + "type": "shell", + "group": "build", + "command": "cd ./build && cmake build .." 
+ }, + { + "label": "Compile Markdown", + "type": "shell", + "args": [], + "command": "${command:extension.markdown-pdf: Export (PDF)}", + + "command2": "markdown-it README.md -o README.html", + "presentation": { + "reveal": "never", + "panel": "shared", + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/CMakeLists.txt b/research/string-tables/CMakeLists.txt new file mode 100644 index 0000000..4faf650 --- /dev/null +++ b/research/string-tables/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 3.5.0) + +# set the project name and version +project(StringsTableTest VERSION 1.0) + +# specify the C++ standard +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED True) + +# add the Program library +add_library(Program program.cpp) + +# add the StringsTable library +add_library(StringsTable FixedStrings.cpp FixedStrings.inl) +target_include_directories(StringsTable PUBLIC build) + +# add the executable +add_executable(StringsTableTest main.cpp) +target_link_libraries(StringsTableTest PUBLIC Program StringsTable) + +# add generator to create the strings table +add_custom_command( + OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/FixedStrings.inl + COMMAND ${CMAKE_CXX_COMPILER} ../main.cpp $ -o dummy 2>&1 + | sed -n 's@.\*undefined.\*cFixedStringId_\\\([[:alnum:]_]\*\\\).\*@DEFINE_FIXED_STRING\(\\1\)@p' + | sort | uniq > FixedStrings.inl + DEPENDS Program +) diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + 
"limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + 
void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/.vscode/tasks.json b/research/string-tables/.vscode/tasks.json new file mode 100644 index 0000000..64a18e6 --- /dev/null +++ b/research/string-tables/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Build C++ project", + "type": "shell", + "group": "build", + "command": "cd ./build && make", + "problemMatcher": [] + }, + { + "label": "Build & run C++ project", + "type": "shell", + "group": { + "kind": "build", + "isDefault": true + }, + "command": "cd ./build && make && ./StringsTableTest", + "problemMatcher": [] + }, + { + "label": "Build CMake", + "type": "shell", + "group": "build", + "command": "cd ./build && cmake build .." 
+ }, + { + "label": "Compile Markdown", + "type": "shell", + "args": [], + "command": "${command:extension.markdown-pdf: Export (PDF)}", + + "command2": "markdown-it README.md -o README.html", + "presentation": { + "reveal": "never", + "panel": "shared", + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/CMakeLists.txt b/research/string-tables/CMakeLists.txt new file mode 100644 index 0000000..4faf650 --- /dev/null +++ b/research/string-tables/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 3.5.0) + +# set the project name and version +project(StringsTableTest VERSION 1.0) + +# specify the C++ standard +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED True) + +# add the Program library +add_library(Program program.cpp) + +# add the StringsTable library +add_library(StringsTable FixedStrings.cpp FixedStrings.inl) +target_include_directories(StringsTable PUBLIC build) + +# add the executable +add_executable(StringsTableTest main.cpp) +target_link_libraries(StringsTableTest PUBLIC Program StringsTable) + +# add generator to create the strings table +add_custom_command( + OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/FixedStrings.inl + COMMAND ${CMAKE_CXX_COMPILER} ../main.cpp $ -o dummy 2>&1 + | sed -n 's@.\*undefined.\*cFixedStringId_\\\([[:alnum:]_]\*\\\).\*@DEFINE_FIXED_STRING\(\\1\)@p' + | sort | uniq > FixedStrings.inl + DEPENDS Program +) diff --git a/research/string-tables/FixedStrings.cpp b/research/string-tables/FixedStrings.cpp new file mode 100644 index 0000000..8adc729 --- /dev/null +++ b/research/string-tables/FixedStrings.cpp @@ -0,0 +1,33 @@ +#include "FixedStrings.h" + + +#define DEFINE_FIXED_STRING(x) enumStringIdValue_##x, +enum StringIdsEnum +{ +#include "FixedStrings.inl" +enumStringId_Count +}; +#undef DEFINE_FIXED_STRING + + +#define DEFINE_FIXED_STRING(x) DECLARE_FIXED_STRING(x) = enumStringIdValue_##x; +#include "FixedStrings.inl" +#undef DEFINE_FIXED_STRING + + +#define 
DEFINE_FIXED_STRING(x) case enumStringIdValue_##x: return #x; +const char* FixedStringFromId(int aStringId) +{ + switch (aStringId) + { +#include "FixedStrings.inl" + } + return "null"; +} +#undef DEFINE_FIXED_STRING + + +int StringTableSize() +{ + return enumStringId_Count; +} diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg 
-I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + 
return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || 
scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i 
< scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating 
&doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release 
interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/.vscode/tasks.json b/research/string-tables/.vscode/tasks.json new file mode 100644 index 0000000..64a18e6 --- /dev/null +++ b/research/string-tables/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Build C++ project", + "type": "shell", + "group": "build", + "command": "cd ./build && make", + "problemMatcher": [] + }, + { + "label": "Build & run C++ project", + "type": "shell", + "group": { + "kind": "build", + "isDefault": true + }, + "command": "cd ./build && make && ./StringsTableTest", + "problemMatcher": [] + }, + { + "label": "Build CMake", + "type": "shell", + "group": "build", + "command": "cd ./build && cmake build .." 
+ }, + { + "label": "Compile Markdown", + "type": "shell", + "args": [], + "command": "${command:extension.markdown-pdf: Export (PDF)}", + + "command2": "markdown-it README.md -o README.html", + "presentation": { + "reveal": "never", + "panel": "shared", + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/CMakeLists.txt b/research/string-tables/CMakeLists.txt new file mode 100644 index 0000000..4faf650 --- /dev/null +++ b/research/string-tables/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 3.5.0) + +# set the project name and version +project(StringsTableTest VERSION 1.0) + +# specify the C++ standard +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED True) + +# add the Program library +add_library(Program program.cpp) + +# add the StringsTable library +add_library(StringsTable FixedStrings.cpp FixedStrings.inl) +target_include_directories(StringsTable PUBLIC build) + +# add the executable +add_executable(StringsTableTest main.cpp) +target_link_libraries(StringsTableTest PUBLIC Program StringsTable) + +# add generator to create the strings table +add_custom_command( + OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/FixedStrings.inl + COMMAND ${CMAKE_CXX_COMPILER} ../main.cpp $ -o dummy 2>&1 + | sed -n 's@.\*undefined.\*cFixedStringId_\\\([[:alnum:]_]\*\\\).\*@DEFINE_FIXED_STRING\(\\1\)@p' + | sort | uniq > FixedStrings.inl + DEPENDS Program +) diff --git a/research/string-tables/FixedStrings.cpp b/research/string-tables/FixedStrings.cpp new file mode 100644 index 0000000..8adc729 --- /dev/null +++ b/research/string-tables/FixedStrings.cpp @@ -0,0 +1,33 @@ +#include "FixedStrings.h" + + +#define DEFINE_FIXED_STRING(x) enumStringIdValue_##x, +enum StringIdsEnum +{ +#include "FixedStrings.inl" +enumStringId_Count +}; +#undef DEFINE_FIXED_STRING + + +#define DEFINE_FIXED_STRING(x) DECLARE_FIXED_STRING(x) = enumStringIdValue_##x; +#include "FixedStrings.inl" +#undef DEFINE_FIXED_STRING + + +#define 
DEFINE_FIXED_STRING(x) case enumStringIdValue_##x: return #x; +const char* FixedStringFromId(int aStringId) +{ + switch (aStringId) + { +#include "FixedStrings.inl" + } + return "null"; +} +#undef DEFINE_FIXED_STRING + + +int StringTableSize() +{ + return enumStringId_Count; +} diff --git a/research/string-tables/FixedStrings.h b/research/string-tables/FixedStrings.h new file mode 100644 index 0000000..7b86833 --- /dev/null +++ b/research/string-tables/FixedStrings.h @@ -0,0 +1,18 @@ +#pragma once +#ifndef FIXED_STRINGS_H +#define FIXED_STRINGS_H + +// A 'fixed-string' is a compile time string which is +// stored in the read only section of the executable +// and is available as an int. The mapping is fixed +// at compile time. The retrieval of the static string +// that the id maps to is thread safe. + +#define FIXED_STRING_ID(x) cFixedStringId_##x +#define DECLARE_FIXED_STRING(x) extern const int FIXED_STRING_ID(x) + +// Thread-safe +extern const char* FixedStringFromId(int aFixedStringId); +extern int StringTableSize(); + +#endif // FIXED_STRINGS_H diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": 
"/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module 
*findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } 
+ + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with 
packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double 
window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory 
to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. 
+ HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + 
ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; + ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = 
CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char 
*)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = 
CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI 
Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? 
+ } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening 
context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- /dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool 
isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; + + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + 
return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + 
return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + 
+ const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + 
void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/.vscode/tasks.json b/research/string-tables/.vscode/tasks.json new file mode 100644 index 0000000..64a18e6 --- /dev/null +++ b/research/string-tables/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Build C++ project", + "type": "shell", + "group": "build", + "command": "cd ./build && make", + "problemMatcher": [] + }, + { + "label": "Build & run C++ project", + "type": "shell", + "group": { + "kind": "build", + "isDefault": true + }, + "command": "cd ./build && make && ./StringsTableTest", + "problemMatcher": [] + }, + { + "label": "Build CMake", + "type": "shell", + "group": "build", + "command": "cd ./build && cmake build .." 
+ }, + { + "label": "Compile Markdown", + "type": "shell", + "args": [], + "command": "${command:extension.markdown-pdf: Export (PDF)}", + + "command2": "markdown-it README.md -o README.html", + "presentation": { + "reveal": "never", + "panel": "shared", + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/CMakeLists.txt b/research/string-tables/CMakeLists.txt new file mode 100644 index 0000000..4faf650 --- /dev/null +++ b/research/string-tables/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 3.5.0) + +# set the project name and version +project(StringsTableTest VERSION 1.0) + +# specify the C++ standard +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED True) + +# add the Program library +add_library(Program program.cpp) + +# add the StringsTable library +add_library(StringsTable FixedStrings.cpp FixedStrings.inl) +target_include_directories(StringsTable PUBLIC build) + +# add the executable +add_executable(StringsTableTest main.cpp) +target_link_libraries(StringsTableTest PUBLIC Program StringsTable) + +# add generator to create the strings table +add_custom_command( + OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/FixedStrings.inl + COMMAND ${CMAKE_CXX_COMPILER} ../main.cpp $ -o dummy 2>&1 + | sed -n 's@.\*undefined.\*cFixedStringId_\\\([[:alnum:]_]\*\\\).\*@DEFINE_FIXED_STRING\(\\1\)@p' + | sort | uniq > FixedStrings.inl + DEPENDS Program +) diff --git a/research/string-tables/FixedStrings.cpp b/research/string-tables/FixedStrings.cpp new file mode 100644 index 0000000..8adc729 --- /dev/null +++ b/research/string-tables/FixedStrings.cpp @@ -0,0 +1,33 @@ +#include "FixedStrings.h" + + +#define DEFINE_FIXED_STRING(x) enumStringIdValue_##x, +enum StringIdsEnum +{ +#include "FixedStrings.inl" +enumStringId_Count +}; +#undef DEFINE_FIXED_STRING + + +#define DEFINE_FIXED_STRING(x) DECLARE_FIXED_STRING(x) = enumStringIdValue_##x; +#include "FixedStrings.inl" +#undef DEFINE_FIXED_STRING + + +#define 
DEFINE_FIXED_STRING(x) case enumStringIdValue_##x: return #x; +const char* FixedStringFromId(int aStringId) +{ + switch (aStringId) + { +#include "FixedStrings.inl" + } + return "null"; +} +#undef DEFINE_FIXED_STRING + + +int StringTableSize() +{ + return enumStringId_Count; +} diff --git a/research/string-tables/FixedStrings.h b/research/string-tables/FixedStrings.h new file mode 100644 index 0000000..7b86833 --- /dev/null +++ b/research/string-tables/FixedStrings.h @@ -0,0 +1,18 @@ +#pragma once +#ifndef FIXED_STRINGS_H +#define FIXED_STRINGS_H + +// A 'fixed-string' is a compile time string which is +// stored in the read only section of the executable +// and is available as an int. The mapping is fixed +// at compile time. The retrieval of the static string +// that the id maps to is thread safe. + +#define FIXED_STRING_ID(x) cFixedStringId_##x +#define DECLARE_FIXED_STRING(x) extern const int FIXED_STRING_ID(x) + +// Thread-safe +extern const char* FixedStringFromId(int aFixedStringId); +extern int StringTableSize(); + +#endif // FIXED_STRINGS_H diff --git a/research/string-tables/README.md b/research/string-tables/README.md new file mode 100644 index 0000000..b188761 --- /dev/null +++ b/research/string-tables/README.md @@ -0,0 +1,18 @@ + +StringsTableTest +---------------- + +A _fixed-string_ is a compile time string which is +stored in the read only section of the executable +and is available as an int. The mapping is fixed +at compile time. The retrieval of the static string +that the id maps to is thread safe. + +This test shows how this can be integrated with +cmake to be able to at compile time find all the +strings and place them in the strings table. + +This could be useful for a fixed size union that +contains various types as well as string ids which +refer to fixed strings. 
+ diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg -I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg 
+ sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + return staticModuleMapper ? 
staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || scaleContextOutputWidth != outW || 
scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i < scaleContextOutputHeight; i++ ) { 
+ memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating &doublebuffer if necassery) +void 
MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release interfaces to BitBlt functionality + 
SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/.vscode/tasks.json b/research/string-tables/.vscode/tasks.json new file mode 100644 index 0000000..64a18e6 --- /dev/null +++ b/research/string-tables/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Build C++ project", + "type": "shell", + "group": "build", + "command": "cd ./build && make", + "problemMatcher": [] + }, + { + "label": "Build & run C++ project", + "type": "shell", + "group": { + "kind": "build", + "isDefault": true + }, + "command": "cd ./build && make && ./StringsTableTest", + "problemMatcher": [] + }, + { + "label": "Build CMake", + "type": "shell", + "group": "build", + "command": "cd ./build && cmake build .." 
+ }, + { + "label": "Compile Markdown", + "type": "shell", + "args": [], + "command": "${command:extension.markdown-pdf: Export (PDF)}", + + "command2": "markdown-it README.md -o README.html", + "presentation": { + "reveal": "never", + "panel": "shared", + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/CMakeLists.txt b/research/string-tables/CMakeLists.txt new file mode 100644 index 0000000..4faf650 --- /dev/null +++ b/research/string-tables/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 3.5.0) + +# set the project name and version +project(StringsTableTest VERSION 1.0) + +# specify the C++ standard +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED True) + +# add the Program library +add_library(Program program.cpp) + +# add the StringsTable library +add_library(StringsTable FixedStrings.cpp FixedStrings.inl) +target_include_directories(StringsTable PUBLIC build) + +# add the executable +add_executable(StringsTableTest main.cpp) +target_link_libraries(StringsTableTest PUBLIC Program StringsTable) + +# add generator to create the strings table +add_custom_command( + OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/FixedStrings.inl + COMMAND ${CMAKE_CXX_COMPILER} ../main.cpp $ -o dummy 2>&1 + | sed -n 's@.\*undefined.\*cFixedStringId_\\\([[:alnum:]_]\*\\\).\*@DEFINE_FIXED_STRING\(\\1\)@p' + | sort | uniq > FixedStrings.inl + DEPENDS Program +) diff --git a/research/string-tables/FixedStrings.cpp b/research/string-tables/FixedStrings.cpp new file mode 100644 index 0000000..8adc729 --- /dev/null +++ b/research/string-tables/FixedStrings.cpp @@ -0,0 +1,33 @@ +#include "FixedStrings.h" + + +#define DEFINE_FIXED_STRING(x) enumStringIdValue_##x, +enum StringIdsEnum +{ +#include "FixedStrings.inl" +enumStringId_Count +}; +#undef DEFINE_FIXED_STRING + + +#define DEFINE_FIXED_STRING(x) DECLARE_FIXED_STRING(x) = enumStringIdValue_##x; +#include "FixedStrings.inl" +#undef DEFINE_FIXED_STRING + + +#define 
DEFINE_FIXED_STRING(x) case enumStringIdValue_##x: return #x; +const char* FixedStringFromId(int aStringId) +{ + switch (aStringId) + { +#include "FixedStrings.inl" + } + return "null"; +} +#undef DEFINE_FIXED_STRING + + +int StringTableSize() +{ + return enumStringId_Count; +} diff --git a/research/string-tables/FixedStrings.h b/research/string-tables/FixedStrings.h new file mode 100644 index 0000000..7b86833 --- /dev/null +++ b/research/string-tables/FixedStrings.h @@ -0,0 +1,18 @@ +#pragma once +#ifndef FIXED_STRINGS_H +#define FIXED_STRINGS_H + +// A 'fixed-string' is a compile time string which is +// stored in the read only section of the executable +// and is available as an int. The mapping is fixed +// at compile time. The retrieval of the static string +// that the id maps to is thread safe. + +#define FIXED_STRING_ID(x) cFixedStringId_##x +#define DECLARE_FIXED_STRING(x) extern const int FIXED_STRING_ID(x) + +// Thread-safe +extern const char* FixedStringFromId(int aFixedStringId); +extern int StringTableSize(); + +#endif // FIXED_STRINGS_H diff --git a/research/string-tables/README.md b/research/string-tables/README.md new file mode 100644 index 0000000..b188761 --- /dev/null +++ b/research/string-tables/README.md @@ -0,0 +1,18 @@ + +StringsTableTest +---------------- + +A _fixed-string_ is a compile time string which is +stored in the read only section of the executable +and is available as an int. The mapping is fixed +at compile time. The retrieval of the static string +that the id maps to is thread safe. + +This test shows how this can be integrated with +cmake to be able to at compile time find all the +strings and place them in the strings table. + +This could be useful for a fixed size union that +contains various types as well as string ids which +refer to fixed strings. 
+ diff --git a/research/string-tables/main.cpp b/research/string-tables/main.cpp new file mode 100644 index 0000000..c8f6727 --- /dev/null +++ b/research/string-tables/main.cpp @@ -0,0 +1,7 @@ +extern void program(); + +int main(int argc, char* argv[]) +{ + program(); + return 0; +} diff --git a/research/pipeline/.vscode/c_cpp_properties.json b/research/pipeline/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..54263e4 --- /dev/null +++ b/research/pipeline/.vscode/c_cpp_properties.json @@ -0,0 +1,52 @@ +{ + "configurations": [ + { + "name": "Win32", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + }, + { + "name": "Mac", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64" + }, + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**" + ], + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "clang-x64", + "compilerArgs": [], + "browse": { + "path": [ + "${workspaceFolder}/**" + ], + "limitSymbolsToIncludedHeaders": true + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/pipeline/3rdParty/ffmpeg b/research/pipeline/3rdParty/ffmpeg new file mode 160000 index 0000000..b6d7c4c --- /dev/null +++ b/research/pipeline/3rdParty/ffmpeg @@ -0,0 +1 @@ +Subproject commit b6d7c4c1d48a30fdccf00fa971c4821b66f24c41 diff --git a/research/pipeline/Makefile b/research/pipeline/Makefile new file mode 100755 index 0000000..84427c9 --- /dev/null +++ b/research/pipeline/Makefile @@ -0,0 +1,10 @@ + +all: prototype.cpp + g++ prototype.cpp -I/usr/include/ -I3rdParty/ffmpeg 
-I3rdParty/ffmpeg/libavcodec -I3rdParty/ffmpeg/libavformat -L3rdParty/ffmpeg/libavcodec -L3rdParty/ffmpeg/libavformat -lavformat -lavcodec -lz -lpthread + +# -lddraw -lgdi32 + +deps: + mkdir -p 3rdParty && cd 3rdParty && [ -d ffmpeg ] || git clone https://git.ffmpeg.org/ffmpeg.git ffmpeg + sudo apt-get install nasm + cd 3rdParty/ffmpeg && ./configure && make diff --git a/research/pipeline/ModuleMapper.cpp b/research/pipeline/ModuleMapper.cpp new file mode 100644 index 0000000..658fc7d --- /dev/null +++ b/research/pipeline/ModuleMapper.cpp @@ -0,0 +1,71 @@ +#include +#include +#include "Types/Module.hpp" +#include "Types/Format.hpp" + + +class DispatchInterface { +public: + virtual void dispatch( Command *command ) = 0; +}; + + +class ModuleMapper { +public: + void addModule( Module *module ) + { + modules.push_back(module); + } + + void addMapping( Address address, DispatchInterface *dispatcher ) + { + dispatchAddressMap[address] = dispatcher; + } + + Module *findModuleWithInputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->inputFormat() == format ) { + return (*it); + } + } + return 0; + } + + Module *findModuleWithOutputFormat( Format format ) + { + for ( std::list::iterator it = modules.begin(); it != modules.end(); ++it ) { + if ( (*it)->outputFormat() == format ) { + return (*it); + } + } + } + + DispatchInterface *lookup( Address address ) + { + return dispatchAddressMap[address]; + } + + void dispatchCommand( Address address, Commands command, const void *arg ) + { + Command *cmd = new Command; + cmd->command = command; + cmd->arg = arg; + cmd->address = address; +// lookup( cmd->address )->dispatch( cmd ); + address->command( cmd->command, cmd->arg ); + } + +private: + std::list modules; + std::map dispatchAddressMap; + std::multimap inputFormatModuleMap; + std::multimap outputFormatModuleMap; +}; + + +ModuleMapper *moduleMapper() +{ + static ModuleMapper *staticModuleMapper = 0; + 
return staticModuleMapper ? staticModuleMapper : staticModuleMapper = new ModuleMapper; +} diff --git a/research/pipeline/Modules/DirectDrawRenderer.cpp b/research/pipeline/Modules/DirectDrawRenderer.cpp new file mode 100644 index 0000000..d62bfba --- /dev/null +++ b/research/pipeline/Modules/DirectDrawRenderer.cpp @@ -0,0 +1,529 @@ +#include "libavcodec/avcodec.h" +#include "libswresample/swresample.h" +#include "libswscale/swscale.h" + +enum ColorFormat { + RGB565, + BGR565, + RGBA8888, + BGRA8888 +}; + +class VideoScaleContext { +public: + AVPicture outputPic1; + AVPicture outputPic2; + AVPicture outputPic3; + + VideoScaleContext() { + //img_convert_init(); + videoScaleContext2 = 0; + outputPic1.data[0] = 0; + outputPic2.data[0] = 0; + outputPic3.data[0] = 0; + } + + virtual ~VideoScaleContext() { + free(); + } + + void free() { + if ( videoScaleContext2 ) + sws_freeContext(videoScaleContext2); + videoScaleContext2 = 0; + if ( outputPic1.data[0] ) + avpicture_free(&outputPic1); + outputPic1.data[0] = 0; + if ( outputPic2.data[0] ) + avpicture_free(&outputPic2); + outputPic2.data[0] = 0; + if ( outputPic3.data[0] ) + avpicture_free(&outputPic3); + outputPic3.data[0] = 0; + } + + void init() { + scaleContextDepth = -1; + scaleContextInputWidth = -1; + scaleContextInputHeight = -1; + scaleContextPicture1Width = -1; + scaleContextPicture2Width = -1; + scaleContextOutputWidth = -1; + scaleContextOutputHeight = -1; + scaleContextLineStride = -1; + } + + bool configure(int w, int h, int outW, int outH, AVFrame *picture, int lineStride, int fmt, ColorFormat outFmt ) { + int colorMode = -1; + switch ( outFmt ) { + case RGB565: colorMode = AV_PIX_FMT_RGB565; break; + case BGR565: colorMode = AV_PIX_FMT_RGB565; break; + case RGBA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + case BGRA8888: colorMode = AV_PIX_FMT_RGB32_1; break; + }; + scaleContextFormat = fmt; + scaleContextDepth = colorMode; + if ( scaleContextInputWidth != w || scaleContextInputHeight != h + || 
scaleContextOutputWidth != outW || scaleContextOutputHeight != outH ) { + scaleContextInputWidth = w; + scaleContextInputHeight = h; + scaleContextOutputWidth = outW; + scaleContextOutputHeight = outH; + scaleContextLineStride = lineStride; + free(); + + videoScaleContext2 = sws_getContext(w, h, AV_PIX_FMT_RGB32_1, outW, outH, (AVPixelFormat)colorMode, 0, nullptr, nullptr, nullptr); + + if ( !videoScaleContext2 ) + return false; + if ( avpicture_alloc(&outputPic1, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic2, (AVPixelFormat)scaleContextDepth, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + if ( avpicture_alloc(&outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight) < 0 ) + return false; + } + return true; + } + + void convert(uint8_t *output, AVFrame *picture) { + if ( !videoScaleContext2 || !picture || !outputPic1.data[0] || !outputPic2.data[0] ) + return; + + // XXXXXXXXX This sucks ATM, converts to YUV420P, scales, then converts to output format + // first conversion needed because img_resample assumes YUV420P, doesn't seem to + // behave with packed image formats + + img_convert(&outputPic1, AV_PIX_FMT_YUV420P, (AVPicture*)picture, scaleContextFormat, scaleContextInputWidth, scaleContextInputHeight); + + img_resample(videoScaleContext2, &outputPic3, &outputPic1); + + img_convert(&outputPic2, scaleContextDepth, &outputPic3, AV_PIX_FMT_YUV420P, scaleContextOutputWidth, scaleContextOutputHeight); + + sws_scale(videoScaleContext2, picture->buf[0]->data const uint8_t *const srcSlice[], + const int srcStride[], int srcSliceY, int srcSliceH, + uint8_t *const dst[], const int dstStride[]); + + //img_resample(videoScaleContext2, &outputPic1, (AVPicture*)picture); + //img_convert(&outputPic2, scaleContextDepth, &outputPic1, scaleContextFormat, scaleContextOutputWidth, scaleContextOutputHeight); + + int offset = 0; + for ( int i = 0; i 
< scaleContextOutputHeight; i++ ) { + memcpy( output, outputPic2.data[0] + offset, outputPic2.linesize[0] ); + output += scaleContextLineStride; + offset += outputPic2.linesize[0]; + } + } + +private: + struct SwsContext *videoScaleContext2; + int scaleContextDepth; + int scaleContextInputWidth; + int scaleContextInputHeight; + int scaleContextPicture1Width; + int scaleContextPicture2Width; + int scaleContextOutputWidth; + int scaleContextOutputHeight; + int scaleContextLineStride; + int scaleContextFormat; +}; + + +#ifdef _WIN32 + + +#include +#include + +enum display_method { USE_WINDOWS_API, USE_DIRECT_DRAW }; + +// Generic Global Variables +HWND MainWnd_hWnd; +HINSTANCE g_hInstance; +HDC hdc; +HPALETTE oldhpal; +RECT r; + +// DirectDraw specific Variables +LPDIRECTDRAW lpDD = NULL; +LPDIRECTDRAWSURFACE lpDDSPrimary = NULL; // DirectDraw primary surface +LPDIRECTDRAWSURFACE lpDDSOne = NULL; // Offscreen surface #1 +DDSURFACEDESC ddsd; + +// Standard Windows API specific Variables +HDC hdcMemory; +HBITMAP hbmpMyBitmap, hbmpOld; + +// User decided variables +int _method__; // API or DirectDraw +int _do_full_; // Full screen +int _do_flip_; // Page flipping +int _double__; // Double window size +int _on_top__; // Always on top +int _rate____; // Calculate frame rate + +// Interface Variables +unsigned char *DoubleBuffer; + +// Resolution Variables +int width; +int height; +int bytes_per_pixel; + + +#define fatal_error(message) _fatal_error(message, __FILE__, __LINE__) +void _fatal_error(char *message, char *file, int line); + +// Fatal error handler (use the macro version in header file) +void _fatal_error(char *message, char *file, int line) +{ + char error_message[1024]; + sprintf(error_message, "%s, in %s at line %d", message, file, line); + puts(error_message); + MessageBox(NULL, error_message, "Fatal Error!", MB_OK); + exit(EXIT_FAILURE); +} + + +class MSWindowsWindow { +}; + + +class DirectDrawWindow { +}; + + +// Flip/Blt Doublebuffer to screen (updating 
&doublebuffer if necassery) +void MyShowDoubleBuffer(void) +{ + if (_method__ == USE_DIRECT_DRAW) { + + if (_do_flip_) { + // Page flipped DirectDraw + if (IDirectDrawSurface_Lock(lpDDSPrimary, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if(IDirectDrawSurface_Flip(lpDDSPrimary,lpDDSOne,0)==DDERR_SURFACELOST) { + IDirectDrawSurface_Restore(lpDDSPrimary); + IDirectDrawSurface_Restore(lpDDSOne); + } + + } else { + // Non Page flipped DirectDraw + POINT pt; + HDC hdcx; + ShowCursor(0); + + if (_do_full_) { + if(IDirectDrawSurface_BltFast(lpDDSPrimary,0,0,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY)==DDERR_SURFACELOST) + IDirectDrawSurface_Restore(lpDDSPrimary), + IDirectDrawSurface_Restore(lpDDSOne); + } else { + GetDCOrgEx(hdcx = GetDC(MainWnd_hWnd), &pt); + ReleaseDC(MainWnd_hWnd, hdcx); + IDirectDrawSurface_BltFast(lpDDSPrimary,pt.x,pt.y,lpDDSOne,&r,DDBLTFAST_NOCOLORKEY); + } + + ShowCursor(1); + } + } else { + // Using Windows API + // BltBlt from memory to screen using standard windows API + SetBitmapBits(hbmpMyBitmap, width*height*bytes_per_pixel, DoubleBuffer); + if (_double__) + StretchBlt(hdc, 0, 0, 2*width, 2*height, hdcMemory, 0, 0, width, height, SRCCOPY); + else + BitBlt(hdc, 0, 0, width, height, hdcMemory, 0, 0, SRCCOPY); + } +} + +int done = 0; + +// Shut down application +void MyCloseWindow(void) +{ + if (done == 0) + { + done = 1; + + if (_method__ == USE_DIRECT_DRAW) { + ShowCursor(1); + if(lpDD != NULL) { + if(lpDDSPrimary != NULL) + IDirectDrawSurface_Release(lpDDSPrimary); + if (!_do_flip_) + if(lpDDSOne != NULL) + IDirectDrawSurface_Release(lpDDSOne); + IDirectDrawSurface_Release(lpDD); + } + lpDD = NULL; + lpDDSOne = NULL; + lpDDSPrimary = NULL; + } else { + /* release buffer */ + free(DoubleBuffer); + // Release 
interfaces to BitBlt functionality + SelectObject(hdcMemory, hbmpOld); + DeleteDC(hdcMemory); + } + ReleaseDC(MainWnd_hWnd, hdc); + PostQuitMessage(0); + + } +} + +// Do the standard windows message loop thing +void MyDoMessageLoop(void) +{ + MSG msg; + while(GetMessage(&msg, NULL, 0, 0 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } + exit(msg.wParam); +} + + +void ProcessMessages() +{ + MSG msg; + while (PeekMessage(&msg, NULL, 0, 0, 1 )) + { + TranslateMessage(&msg); + DispatchMessage(&msg); + } +} + + + +LRESULT CALLBACK WndProc(HWND hWnd, UINT iMessage, WPARAM wParam, LPARAM lParam) +{ + if ( iMessage == WM_SIZE ) { + width = lParam & 0xFFFF; + height = (lParam >> 16) + 4; + printf("resize: %i x %i (%i %i)\n", width, height, (uint)lParam & 0xFFFF, lParam >> 16); + } + return DefWindowProc(hWnd, iMessage, wParam, lParam); +} + + + +// Setup the application +void MyCreateWindow() +{ + DDSCAPS ddscaps; + WNDCLASS wndclass; // Structure used to register Windows class. + HINSTANCE hInstance = 0;//g_hInstance; + + wndclass.style = 0; + wndclass.lpfnWndProc = WndProc;//DefWindowProc; + wndclass.cbClsExtra = 0; + wndclass.cbWndExtra = 0; + wndclass.hInstance = hInstance; + wndclass.hIcon = LoadIcon(hInstance, "3D-MAGIC"); + wndclass.hCursor = LoadCursor(NULL, IDC_ARROW); + wndclass.hbrBackground = (HBRUSH)GetStockObject(BLACK_BRUSH); + wndclass.lpszMenuName = NULL; + wndclass.lpszClassName = "DDraw Renderer Module"; + + if (!RegisterClass(&wndclass)) + fatal_error("Error Registering Window"); + + if (!(MainWnd_hWnd = CreateWindow("DDraw Renderer Module", "Media Player", + WS_OVERLAPPEDWINDOW | WS_VISIBLE, /* Window style. */ + CW_USEDEFAULT, CW_USEDEFAULT, /* Default position. */ + + // take into account window border, and create a larger + // window if stretching to double the window size. + (_double__) ? 2*width + 10 : width + 10, + (_double__) ? 
2*height + 30 : height + 30, + NULL, NULL, hInstance, NULL))) + fatal_error("Error Creating Window"); + + hdc = GetDC(MainWnd_hWnd); + + r.left = 0; + r.top = 0; + r.right = width; + r.bottom = height; + + if (_method__ == USE_DIRECT_DRAW) + { + if (DirectDrawCreate(NULL, &lpDD, NULL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDCreate)"); + + if (_do_full_) + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, DDSCL_EXCLUSIVE | DDSCL_FULLSCREEN | DDSCL_ALLOWMODEX) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + if (IDirectDraw_SetDisplayMode(lpDD, width, height, 8*bytes_per_pixel) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetDisplayMode)"); + } + else + { + if (IDirectDraw_SetCooperativeLevel(lpDD, MainWnd_hWnd, /* DDSCL_EXCLUSIVE | */ DDSCL_NORMAL) != DD_OK) + fatal_error("Error initialising DirectDraw (DDSetCoopLevel)"); + + _do_flip_ = 0; + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + } + + if (_do_flip_) + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT; + ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP | DDSCAPS_COMPLEX; + ddsd.dwBackBufferCount = 1; + if (IDirectDraw_CreateSurface(lpDD, &ddsd, &lpDDSPrimary, NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + // Get the pointer to the back buffer + ddscaps.dwCaps = DDSCAPS_BACKBUFFER; + if (IDirectDrawSurface_GetAttachedSurface(lpDDSPrimary, &ddscaps, &lpDDSOne) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + } + else + { + ddsd.dwSize = sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS; + ddsd.ddsCaps.dwCaps=DDSCAPS_PRIMARYSURFACE; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSPrimary,NULL) != DD_OK) + fatal_error("Error Creating a Primiary DirectDraw Surface (DDCreateSurface)"); + + ddsd.dwSize=sizeof(ddsd); + ddsd.dwFlags=DDSD_CAPS|DDSD_HEIGHT|DDSD_WIDTH; + ddsd.ddsCaps.dwCaps=DDSCAPS_OFFSCREENPLAIN; 
+ ddsd.dwWidth=width; + ddsd.dwHeight=height; + if (IDirectDraw_CreateSurface(lpDD,&ddsd,&lpDDSOne,NULL) != DD_OK) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + + if (lpDDSOne == NULL) + fatal_error("Error Creating a DirectDraw Off Screen Surface (DDCreateSurface)"); + } + + // Get pointer to buffer surface + if (IDirectDrawSurface_Lock(lpDDSOne, NULL, &ddsd, 0, NULL) != DD_OK) + fatal_error("Error Locking a DirectDraw Surface (DDSurfaceLock)"); + DoubleBuffer = (unsigned char *)ddsd.lpSurface; + if (IDirectDrawSurface_Unlock(lpDDSOne, NULL) != DD_OK) + fatal_error("Error Unlocking a DirectDraw Surface (DDSurfaceUnlock)"); + + if (_do_flip_) + ShowCursor(0); + } + else /* Windows API */ + { + bytes_per_pixel = GetDeviceCaps(hdc, BITSPIXEL) >> 3; + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hdcMemory = CreateCompatibleDC(hdc); + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + { + HPALETTE hpal; + PALETTEENTRY mypal[64*3+16]; + int i; + LOGPALETTE *plgpl; + + plgpl = (LOGPALETTE*) LocalAlloc(LPTR, + sizeof(LOGPALETTE) + (16+3*64)*sizeof(PALETTEENTRY)); + + plgpl->palNumEntries = 64*3+16; + plgpl->palVersion = 0x300; + + for (i = 16; i < 64+16; i++) + { + plgpl->palPalEntry[i].peRed = mypal[i].peRed = LOBYTE(i << 2); + plgpl->palPalEntry[i].peGreen = mypal[i].peGreen = 0; + plgpl->palPalEntry[i].peBlue = mypal[i].peBlue = 0; + plgpl->palPalEntry[i].peFlags = mypal[i].peFlags = PC_RESERVED; + + plgpl->palPalEntry[i+64].peRed = mypal[i+64].peRed = 0; + plgpl->palPalEntry[i+64].peGreen = mypal[i+64].peGreen = LOBYTE(i << 2); + plgpl->palPalEntry[i+64].peBlue = mypal[i+64].peBlue = 0; + plgpl->palPalEntry[i+64].peFlags = mypal[i+64].peFlags = PC_RESERVED; + + 
plgpl->palPalEntry[i+128].peRed = mypal[i+128].peRed = 0; + plgpl->palPalEntry[i+128].peGreen = mypal[i+128].peGreen = 0; + plgpl->palPalEntry[i+128].peBlue = mypal[i+128].peBlue = LOBYTE(i << 2); + plgpl->palPalEntry[i+128].peFlags = mypal[i+128].peFlags = PC_RESERVED; + } + + hpal = CreatePalette(plgpl); + oldhpal = SelectPalette(hdc, hpal, FALSE); + + RealizePalette(hdc); + + } + + } +} + + + +class DirectDrawRenderer : public SimpleModule { + public: + DirectDrawRenderer() { + width = 320 + 32; + height = 240; + _method__ = 0; // API or DirectDraw + _do_full_ = 0; // Full screen + _do_flip_ = 0; // Page flipping + _double__ = 0; // Double window size + _on_top__ = 0; // Always on top + _rate____ = 0; // Calculate frame rate + } + void init() { + MyCreateWindow(); + } + void process( const Frame &f ) { + const Frame *frame = &f; + if ( frame && frame->refcount() ) { + + +//printf("width: %i height: %i\n", width, height); + + + free(DoubleBuffer); + SelectObject(hdcMemory, hbmpOld); + DeleteDC((HDC)hbmpMyBitmap); + //DeleteDC(hdcMemory); + + DoubleBuffer = (unsigned char *)malloc(width*height*bytes_per_pixel); + if (DoubleBuffer == NULL) + fatal_error("Unable to allocate enough main memory for an offscreen Buffer"); + + // Initialise interface to BitBlt function + hbmpMyBitmap = CreateCompatibleBitmap(hdc, width, height); + hbmpOld = (HBITMAP)SelectObject(hdcMemory, hbmpMyBitmap); + + + YUVFrame *picture = (YUVFrame *)frame->data(); + if (!videoScaleContext.configure(picture->width, picture->height, width, height, + picture->pic, width * 4, picture->fmt, RGBA8888)) + return; + videoScaleContext.convert(DoubleBuffer, picture->pic); + MyShowDoubleBuffer(); + frame->deref(); + } + } + const char *name() { return "YUV Renderer"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_RENDERED_VIDEO"; } + bool isBlocking() { return true; } + private: + VideoScaleContext videoScaleContext; +}; + + +#endif // _WIN32 
diff --git a/research/pipeline/Modules/FFMpegMuxModule.cpp b/research/pipeline/Modules/FFMpegMuxModule.cpp new file mode 100644 index 0000000..aa8c5cd --- /dev/null +++ b/research/pipeline/Modules/FFMpegMuxModule.cpp @@ -0,0 +1,106 @@ + + +class FFMpegMuxModule : public SimpleModule { +public: + FFMpegMuxModule() : outputFileContext( 0 ) + { + } + + void init() + { +printf("A %i\n", __LINE__); + av_register_all(); + + outputFileContext = av_alloc_format_context(); + outputFileContext->oformat = guess_format("avi", 0, 0); + AVStream *videoStream = av_new_stream( outputFileContext, outputFileContext->nb_streams+1 ); + //AVStream *audioStream = av_new_stream( AVFormatContext, outputFileContext->nb_streams+1 ); +printf("A %i\n", __LINE__); + + assert( videoStream ); + assert( outputFileContext->oformat ); + + AVCodecContext *video_enc = &videoStream->codec; + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + assert( avcodec_open( video_enc, codec ) >= 0 ); + + video_enc->codec_type = CODEC_TYPE_VIDEO; + video_enc->codec_id = CODEC_ID_MPEG1VIDEO;//CODEC_ID_MPEG4; // CODEC_ID_H263, CODEC_ID_H263P +// video_enc->bit_rate = video_bit_rate; +// video_enc->bit_rate_tolerance = video_bit_rate_tolerance; + + video_enc->frame_rate = 10;//25;//frame_rate; + video_enc->frame_rate_base = 1;//frame_rate_base; + video_enc->width = WIDTH;//frame_width + frame_padright + frame_padleft; + video_enc->height = HEIGHT;//frame_height + frame_padtop + frame_padbottom; + + video_enc->pix_fmt = PIX_FMT_YUV420P; + + if( av_set_parameters( outputFileContext, NULL ) < 0 ) { + cerr << "Invalid output format parameters\n"; + exit(1); + } + +printf("A %i\n", __LINE__); +// strcpy( outputFileContext->comment, "Created With Project Carmack" ); +// strcpy( outputFileContext->filename, "blah.avi" ); + +// if ( url_fopen( &outputFileContext->pb, outputFileContext->filename, URL_WRONLY ) < 0 ) { + if ( url_fopen( &outputFileContext->pb, "blah2.avi", URL_WRONLY ) < 0 ) { + 
printf( "Couldn't open output file: %s\n", outputFileContext->filename ); + exit( 1 ); + } +printf("A %i\n", __LINE__); + + if ( av_write_header( outputFileContext ) < 0 ) { + printf( "Could not write header for output file %s\n", outputFileContext->filename ); + exit( 1 ); + } + +printf("A %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("B %i\n", __LINE__); + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + //av_dup_packet( pkt ); + + if ( !outputFileContext ) { + printf("can't process video data without a context\n"); + return; + } + +/* + pkt.stream_index= ost->index; + pkt.data= audio_out; + pkt.size= ret; + if(enc->coded_frame) + pkt.pts= enc->coded_frame->pts; + pkt.flags |= PKT_FLAG_KEY; +*/ +printf("B %i\n", __LINE__); + if ( pkt->data ) { +printf("B %i\n", __LINE__); + av_interleaved_write_frame(outputFileContext, pkt); + } else { + printf( "End of data\n" ); + av_write_trailer(outputFileContext); + exit( 0 ); + } +printf("B %i\n", __LINE__); + + frame.deref(); + } + + const char *name() { return "AVI Muxer"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_URL_SINK"; } + bool isBlocking() { return true; } + +private: + AVFormatContext *outputFileContext; +}; + diff --git a/research/pipeline/Modules/FFMpegSourceModule.cpp b/research/pipeline/Modules/FFMpegSourceModule.cpp new file mode 100644 index 0000000..4fba71e --- /dev/null +++ b/research/pipeline/Modules/FFMpegSourceModule.cpp @@ -0,0 +1,119 @@ + + +class FFMpegSourceModule : public SimpleModule { +public: + FFMpegSourceModule() : avFormatContext( 0 ) + { + } + + bool supportsOutputType( Format type ) + { + return type == "FRAME_ID_MPEG1_VIDEO_PACKET" || type == "FRAME_ID_MPEG_AUDIO_PACKET" || type == "FRAME_ID_MPEG2_VIDEO_PACKET" || type == "FRAME_ID_MPEG4_VIDEO_PACKET"; + } + + const char* name() { return "FFMpeg Demuxer Source"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format 
outputFormat() { return "FRAME_ID_MULTIPLE_PACKET"; } + bool isBlocking() { return true; } + list threadAffinity() { } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) + { + printf("file: %s\n", (char*)frame.data()); + + // Open file + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), 0, 0, 0) < 0 || !avFormatContext ) { + printf("error opening file"); + return; + } + + frame.deref(); + + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + + while( avFormatContext ) { + AVPacket *pkt = new AVPacket; +// if ( av_read_packet(avFormatContext, pkt) < 0 ) { + if ( av_read_frame(avFormatContext, pkt) < 0 ) { + printf("error reading packet\n"); + av_free_packet( pkt ); + delete pkt; + exit( 0 ); // EOF ? + } else { + AVCodecContext *context = &avFormatContext->streams[pkt->stream_index]->codec; + Frame *f = getAvailableFrame( context->codec_type ); + if ( !f ) + continue; + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)f->data(); + packet->packet = pkt; + //av_dup_packet( pkt ); + + ProcessMessages(); + + dispatch( routes[pkt->stream_index], Process, f ); + } + } + exit( 0 ); + } + + Frame *getAvailableFrame( int type ) + { + Frame *frame; + list::iterator it; + for ( it = used[type].begin(); it != used[type].end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + + // Create new frame + frame = createNewFrame( type ); + if ( frame ) { + frame->ref(); + used[type].push_back( frame ); + } + return frame; + } + + Frame* createNewFrame( int type ) + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + switch( type ) { + case CODEC_TYPE_AUDIO: + return new Frame( "FRAME_ID_MPEG_AUDIO_PACKET", packet ); + case CODEC_TYPE_VIDEO: + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + return 0; + } + + void reuseFrame( Frame *frame ) + { + 
FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + av_free_packet( packet->packet ); + delete packet->packet; + } + + void connectTo( Module *next, const Frame &f ) + { + routes[((FFMpegStreamPacket*)f.data())->packet->stream_index] = next; + } + +private: + AVFormatContext *avFormatContext; + map > used; + map routes; +}; + diff --git a/research/pipeline/Modules/MP3DecodeModule.cpp b/research/pipeline/Modules/MP3DecodeModule.cpp new file mode 100644 index 0000000..60053f5 --- /dev/null +++ b/research/pipeline/Modules/MP3DecodeModule.cpp @@ -0,0 +1,51 @@ + +class MP3DecodeModule : public SimpleModule { +public: + MP3DecodeModule() : audioCodecContext( 0 ) + { + } + + void init() + { + av_register_all(); + + if ( avcodec_open( audioCodecContext = avcodec_alloc_context(), &mp3_decoder ) < 0 ) { + printf("error opening context\n"); + audioCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + Frame *f = getAvailableFrame(); + PCMData *pcm = (PCMData *)f->data(); + int count = 0, ret = 0, bytesRead; + AVPacket *mp3 = pkt; + unsigned char *ptr = (unsigned char*)mp3->data; + for ( int len = mp3->size; len && ret >= 0; len -= ret, ptr += ret ) { + ret = avcodec_decode_audio(audioCodecContext, (short*)(pcm->data + count), &bytesRead, ptr, len); + if ( bytesRead > 0 ) + count += bytesRead; + } + frame.deref(); + + pcm->size = count; + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + return new Frame( "FRAME_ID_PCM_AUDIO_DATA", new PCMData ); + } + + const char *name() { return "MP3 Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + Format outputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *audioCodecContext; +}; + diff --git a/research/pipeline/Modules/MP3SourceModule.cpp b/research/pipeline/Modules/MP3SourceModule.cpp new file mode 100644 index 0000000..d40c9bf --- 
/dev/null +++ b/research/pipeline/Modules/MP3SourceModule.cpp @@ -0,0 +1,38 @@ + + +class MP3SourceModule : public SimpleModule { +public: + MP3SourceModule() : avFormatContext( 0 ) + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + printf("file: %s\n", (char*)frame.data()); + if ( av_open_input_file(&avFormatContext, (char*)frame.data(), NULL, 0, 0) < 0 || !avFormatContext ) + printf("error opening file"); + + while( avFormatContext ) { + if ( av_read_packet(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + SimpleModule::process( Frame( "FRAME_ID_MPEG_AUDIO_PACKET", &pkt ) ); + } + } + } + + const char *name() { return "MP3 Reader"; } + Format inputFormat() { return "FRAME_ID_URL_SOURCE"; } + Format outputFormat() { return "FRAME_ID_MPEG_AUDIO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVPacket pkt; + AVFormatContext *avFormatContext; +}; + + diff --git a/research/pipeline/Modules/MpegDecodeModule.cpp b/research/pipeline/Modules/MpegDecodeModule.cpp new file mode 100644 index 0000000..5802c9d --- /dev/null +++ b/research/pipeline/Modules/MpegDecodeModule.cpp @@ -0,0 +1,82 @@ +#include "Modules/SimpleModule.hpp" +#include "libavcodec/avcodec.h" +#include "libavformat/avformat.h" + + +class MpegDecodeModule : public SimpleModule { +public: + MpegDecodeModule() : videoCodecContext( 0 ) + { + currentFrame = 0; + } + + void init() + { + av_register_all(); + + if ( avcodec_open( videoCodecContext = avcodec_alloc_context(), &mpeg1video_decoder ) < 0 ) { + printf("error opening context\n"); + videoCodecContext = 0; + } + } + + void process( const Frame &frame ) + { + AVPacket *pkt = ((FFMpegStreamPacket*)frame.data())->packet; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + if ( !currentFrame ) + currentFrame = getAvailableFrame(); + + YUVFrame *yuvFrame = (YUVFrame *)currentFrame->data(); + AVFrame *picture = yuvFrame->pic; 
+ + assert( videoCodecContext->pix_fmt == PIX_FMT_YUV420P ); + +//printf("processing video data (%i x %i)\n", videoCodecContext->width, videoCodecContext->height); + AVPacket *mpeg = pkt; + unsigned char *ptr = (unsigned char*)mpeg->data; + int count = 0, ret = 0, gotPicture = 0; + // videoCodecContext->hurry_up = 2; + int len = mpeg->size; +// for ( ; len && ret >= 0; len -= ret, ptr += ret ) + ret = avcodec_decode_video( videoCodecContext, picture, &gotPicture, ptr, len ); + frame.deref(); + + if ( gotPicture ) { + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; + yuvFrame->fmt = videoCodecContext->pix_fmt; + SimpleModule::process( *currentFrame ); + currentFrame = 0; + } + } + + Frame* createNewFrame() + { + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = avcodec_alloc_frame(); + return new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + } + + void reuseFrame( Frame *frame ) + { + YUVFrame *yuvFrame = (YUVFrame *)frame->data(); + av_free( yuvFrame->pic ); + yuvFrame->pic = avcodec_alloc_frame(); + } + + const char *name() { return "Mpeg1 Video Decoder"; } + Format inputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + Frame *currentFrame; + AVCodecContext *videoCodecContext; +}; + diff --git a/research/pipeline/Modules/MpegEncodeModule.cpp b/research/pipeline/Modules/MpegEncodeModule.cpp new file mode 100644 index 0000000..dc7206a --- /dev/null +++ b/research/pipeline/Modules/MpegEncodeModule.cpp @@ -0,0 +1,125 @@ + + +class MpegEncodeModule : public SimpleModule { +public: + MpegEncodeModule() : videoCodecContext( 0 ) + { + } + + void init() + { +printf("S %i\n", __LINE__); + av_register_all(); + + videoCodecContext = avcodec_alloc_context(); + + AVCodec *codec = avcodec_find_encoder(CODEC_ID_MPEG1VIDEO); + assert( codec ); + +/* + if ( avcodec_open( videoCodecContext, &mpeg1video_encoder ) < 0 ) { 
+ printf("error opening context\n"); + videoCodecContext = 0; + } +*/ + +/* + videoCodecContext->bit_rate = 400000; + videoCodecContext->gop_size = 10; + videoCodecContext->max_b_frames = 1; +*/ + videoCodecContext->width = WIDTH; + videoCodecContext->height = HEIGHT; + videoCodecContext->frame_rate = 25; + videoCodecContext->frame_rate_base= 1; + videoCodecContext->pix_fmt=PIX_FMT_YUV420P; + videoCodecContext->codec_type = CODEC_TYPE_VIDEO; + videoCodecContext->codec_id = CODEC_ID_MPEG1VIDEO; + + assert( avcodec_open( videoCodecContext, codec ) >= 0 ); + +printf("S %i\n", __LINE__); + } + + void process( const Frame &frame ) + { +printf("T %i\n", __LINE__); + YUVFrame *yuvFrame = (YUVFrame*)frame.data(); + AVFrame *picture = yuvFrame->pic; + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + Frame *f = getAvailableFrame(); + + FFMpegStreamPacket *ffmpeg = (FFMpegStreamPacket*)f->data(); + AVPacket *packet = ffmpeg->packet; + +printf("T %i\n", __LINE__); + +// 160*120*4 = 76800 + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, picture->linesize[0], picture->linesize[1], picture->linesize[2] ); + + AVFrame tmpPic; + if ( avpicture_alloc((AVPicture*)&tmpPic, PIX_FMT_YUV420P, yuvFrame->width, yuvFrame->height) < 0 ) + printf("blah1\n"); + img_convert((AVPicture*)&tmpPic, PIX_FMT_YUV420P, (AVPicture*)picture, yuvFrame->fmt, + yuvFrame->width, yuvFrame->height ); + + printf(" %i x %i %i %i %i \n", yuvFrame->width, yuvFrame->height, tmpPic.linesize[0], tmpPic.linesize[1], tmpPic.linesize[2] ); + + static int64_t pts = 0; + tmpPic.pts = AV_NOPTS_VALUE; + pts += 5000; + +// int ret = avcodec_encode_video( videoCodecContext, (uchar*)av_malloc(1000000), 1024*256, &tmpPic ); + packet->size = avcodec_encode_video( videoCodecContext, packet->data, packet->size, &tmpPic ); + + if ( videoCodecContext->coded_frame ) { + packet->pts = videoCodecContext->coded_frame->pts; + if ( 
videoCodecContext->coded_frame->key_frame ) + packet->flags |= PKT_FLAG_KEY; + } + +printf("T %i\n", __LINE__); + + cerr << "encoded: " << packet->size << " bytes" << endl; +printf("T %i\n", __LINE__); + + frame.deref(); + + SimpleModule::process( *f ); + } + + Frame* createNewFrame() + { + FFMpegStreamPacket *packet = new FFMpegStreamPacket; + packet->packet = new AVPacket; + packet->packet->data = new unsigned char[65536]; + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + return new Frame( "FRAME_ID_MPEG1_VIDEO_PACKET", packet ); + } + + void reuseFrame( Frame *frame ) + { + FFMpegStreamPacket *packet = (FFMpegStreamPacket*)frame->data(); + packet->packet->size = 65536; + packet->packet->pts = AV_NOPTS_VALUE; + packet->packet->flags = 0; + //av_free_packet( packet->packet ); + //delete packet->packet; + } + + const char *name() { return "Mpeg Video Encoder"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_MPEG1_VIDEO_PACKET"; } + bool isBlocking() { return true; } + +private: + AVCodecContext *videoCodecContext; +}; diff --git a/research/pipeline/Modules/OSSRenderer.cpp b/research/pipeline/Modules/OSSRenderer.cpp new file mode 100644 index 0000000..1757af3 --- /dev/null +++ b/research/pipeline/Modules/OSSRenderer.cpp @@ -0,0 +1,42 @@ + +class OSSRenderer : public SimpleModule { +public: + OSSRenderer() { } + + void init(); + void process( const Frame &f ); + + const char *name() { return "OSS Renderer"; } + Format inputFormat() { return "FRAME_ID_PCM_AUDIO_DATA"; } + Format outputFormat() { return "FRAME_ID_RENDERED_AUDIO"; } + bool isBlocking() { return true; } + +private: + int fd; +}; + + +void OSSRenderer::init() +{ + // Initialize OSS + fd = open( "/dev/dsp", O_WRONLY ); + + int format = AFMT_S16_LE; + ioctl( fd, SNDCTL_DSP_SETFMT, &format ); + + int channels = 2; + ioctl( fd, SNDCTL_DSP_CHANNELS, &channels ); + + int speed = 44100; + ioctl( fd, 
SNDCTL_DSP_SPEED, &speed ); +} + +void OSSRenderer::process( const Frame &frame ) +{ + // Render PCM to device + PCMData *pcm = (PCMData*)frame.data(); + if ( write( fd, pcm->data, pcm->size ) == -1 ) + perror( "OSSRenderer::process( Frame )" ); + frame.deref(); +} + diff --git a/research/pipeline/Modules/RoutingModule.cpp b/research/pipeline/Modules/RoutingModule.cpp new file mode 100644 index 0000000..fcc342a --- /dev/null +++ b/research/pipeline/Modules/RoutingModule.cpp @@ -0,0 +1,28 @@ + + +class RoutingModule : public SimpleModule { +public: + RoutingModule() { } + +// bool supportsOutputType(Format type) { return outputFormat() == type; } + + void process( const Frame &frame ) + { + dispatch( routes[Format(frame.id())], Process, &frame ); + } + + void connectTo( Module *next, const Frame &f ) + { + setRoute( next->inputFormat(), next ); + } + +private: + void setRoute( Format t, Module* m ) + { + routes[Format(t)] = m; + } + + map routes; +}; + + diff --git a/research/pipeline/Modules/SimpleModule.cpp b/research/pipeline/Modules/SimpleModule.cpp new file mode 100644 index 0000000..844cc61 --- /dev/null +++ b/research/pipeline/Modules/SimpleModule.cpp @@ -0,0 +1,100 @@ +#include "Types/Module.hpp" +#include + +class SimpleModule : public Module { +public: + SimpleModule() : next( 0 ) { } + + bool isBlocking() { return false; } + std::list
threadAffinity() { } + + bool supportsOutputType(Format type) + { + return outputFormat() == type; + } + + virtual void init() = 0; + + void command( Commands command, const void *arg ) + { + switch (command) { + case Process: + process( *((Frame *)arg) ); + break; + case Simulate: + simulate( *((Frame *)arg) ); + break; + case Deref: + ((Frame *)arg)->deref(); + break; + case Init: + init(); + break; + } + } + + void dispatch( Address address, Commands command, const void *arg ) + { + if ( address ) + staticDispatch( address, command, arg ); + else if ( pipelineMgr && ( command == Process || command == Simulate ) ) + pipelineMgr->unconnectedRoute( this, *(const Frame *)arg ); + } + + virtual void derefFrame( Frame *frame ) + { + dispatch( prev, Deref, frame ); + } + + virtual void process( const Frame &frame ) + { + dispatch( next, Process, &frame ); + } + + virtual void simulate( const Frame &frame ) + { + process( frame ); + } + + void connectTo( Address n, const Frame &f ) + { + next = n; + } + + void connectedFrom( Address n, const Frame &f ) + { + prev = n; + } + + Frame *getAvailableFrame() + { + Frame *frame; + std::list::iterator it; + for ( it = used.begin(); it != used.end(); ++it ) { + frame = *it; + if ( frame->refcount() == 0 ) { + reuseFrame( frame ); + frame->ref(); + return frame; + } + } + frame = createNewFrame(); + frame->ref(); + used.push_back( frame ); + return frame; + } + + virtual Frame* createNewFrame() + { + return new Frame; + } + + virtual void reuseFrame( Frame *frame ) + { } + +private: + std::list used; + Module *next; + Module *prev; +}; + diff --git a/research/pipeline/Modules/SplitterModule.cpp b/research/pipeline/Modules/SplitterModule.cpp new file mode 100644 index 0000000..d0fa215 --- /dev/null +++ b/research/pipeline/Modules/SplitterModule.cpp @@ -0,0 +1,37 @@ + + +class Splitter : public SimpleModule { +public: + Splitter() + { + } + + void init() + { + } + + void process( const Frame &frame ) + { + list::iterator it = 
routes.begin(); + while( it != routes.end() ) { + if ( it != routes.begin() ) + frame.ref(); + dispatch( (*it), Process, &frame ); + ++it; + } + } + + void connectTo( Module *next, const Frame &f ) + { + routes.push_back( next ); + } + + const char *name() { return "Splitter"; } + Format inputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } + +private: + list routes; +}; + diff --git a/research/pipeline/Modules/ThreadBoundaryModule.cpp b/research/pipeline/Modules/ThreadBoundaryModule.cpp new file mode 100644 index 0000000..e4b07d4 --- /dev/null +++ b/research/pipeline/Modules/ThreadBoundaryModule.cpp @@ -0,0 +1,89 @@ + +/* + +class Consumer : public RoutingModule { +public: + Consumer( CommandQueue* b, Format format ) + : RoutingModule(), buffer( b ), formatId( format ) + { } + + void init() + { + } + + void start() + { + for (;;) { + const Command &command = buffer->remove(); + RoutingModule::command( command.command, command.arg ); + } + } + + const char* name() { return "Consumer"; } + Format inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue *buffer; + Format formatId; +}; + +class ConsumerThread : public Thread { +public: + ConsumerThread( Consumer *c ) + : consumer( c ) + { } + + void execute( void* ) + { + consumer->start(); + } + +private: + Consumer *consumer; +}; + + +class ThreadBoundryModule : public RoutingModule { +public: + ThreadBoundryModule( int size, Format format ) + : RoutingModule(), readCommandQueue( size ), consumer( &readCommandQueue, format ), + consumerThread( &consumer ), formatId( format ) + { + } + + void init() + { + } + + void connectTo( Module *m, const Frame &f ) + { + consumer.connectTo( m, f ); + consumerThread.start(0); + } + + void process( const Frame &frame ) + { + readCommandQueue.add( frame ); + } + + const char *name() { return "Thread Boundry Module"; } + Format 
inputFormat() { return formatId; } + Format outputFormat() { return formatId; } + +private: + CommandQueue readCommandQueue; + Consumer consumer; + ConsumerThread consumerThread; + Format formatId; +}; + + +class ProcessBoundryThing : public DispatchInterface { +public: + void dispatch( Command *command ) + { + } +}; + +*/ diff --git a/research/pipeline/Modules/VideoCameraSourceModule.cpp b/research/pipeline/Modules/VideoCameraSourceModule.cpp new file mode 100644 index 0000000..deef2f9 --- /dev/null +++ b/research/pipeline/Modules/VideoCameraSourceModule.cpp @@ -0,0 +1,101 @@ + + +/* +class VideoCameraSourceModule : public SimpleModule { +public: + VideoCameraSourceModule() + { + } + + void init() + { + av_register_all(); + } + + void process( const Frame &frame ) { + AVFormatContext *avFormatContext = 0; + AVFormatParameters vp1, *vp = &vp1; + AVInputFormat *fmt1; + memset(vp, 0, sizeof(*vp)); + fmt1 = av_find_input_format("video4linux");//video_grab_format); + vp->device = 0;//"/dev/video";//video_device; + vp->channel = 0;//video_channel; + vp->standard = "pal";//"ntsc";//video_standard; + vp->width = WIDTH; + vp->height = HEIGHT; + vp->frame_rate = 50; + vp->frame_rate_base = 1; + if (av_open_input_file(&avFormatContext, "", fmt1, 0, vp) < 0) { + printf("Could not find video grab device\n"); + exit(1); + } + if ((avFormatContext->ctx_flags & AVFMTCTX_NOHEADER) && av_find_stream_info(avFormatContext) < 0) { + printf("Could not find video grab parameters\n"); + exit(1); + } + // Gather stream information + if ( av_find_stream_info(avFormatContext) < 0 ) { + printf("error getting stream info\n"); + return; + } + +// AVCodecContext *videoCodecContext = avcodec_alloc_context(); + AVCodecContext *videoCodecContext = &avFormatContext->streams[0]->codec; + AVCodec *codec = avcodec_find_decoder(avFormatContext->streams[0]->codec.codec_id); + + if ( !codec ) { + printf("error finding decoder\n"); + return; + } + + printf("found decoder: %s\n", codec->name); + + 
avFormatContext->streams[0]->r_frame_rate = vp->frame_rate; + avFormatContext->streams[0]->r_frame_rate_base = vp->frame_rate_base; + + videoCodecContext->width = vp->width; + videoCodecContext->height = vp->height; + +// if ( avcodec_open( videoCodecContext, &rawvideo_decoder ) < 0 ) { + if ( avcodec_open( videoCodecContext, codec ) < 0 ) { // is rawvideo_decoder + printf("error opening context\n"); + videoCodecContext = 0; + } + + if ( !videoCodecContext ) { + printf("can't process video data without a context\n"); + return; + } + + AVPacket pkt; + while( avFormatContext ) { + if ( av_read_frame(avFormatContext, &pkt) < 0 ) + printf("error reading packet\n"); + else { + AVFrame *picture = avcodec_alloc_frame(); + YUVFrame *yuvFrame = new YUVFrame; + yuvFrame->pic = picture; + Frame *currentFrame = new Frame( "FRAME_ID_YUV_VIDEO_FRAME", yuvFrame ); + currentFrame->ref(); + + int gotPicture = 0; + avcodec_decode_video( videoCodecContext, picture, &gotPicture, pkt.data, pkt.size ); + + if ( gotPicture ) { + yuvFrame->fmt = videoCodecContext->pix_fmt; // is PIX_FMT_YUV422 + yuvFrame->width = videoCodecContext->width; + yuvFrame->height = videoCodecContext->height; +// printf("showing frame: %i %ix%i\n", yuvFrame->fmt, yuvFrame->width, yuvFrame->height ); + SimpleModule::process( *currentFrame ); + } + } + } + } + + const char *name() { return "Video Camera Source"; } + Format inputFormat() { return "FRAME_ID_VIDEO_CAMERA_SOURCE"; } + Format outputFormat() { return "FRAME_ID_YUV_VIDEO_FRAME"; } + bool isBlocking() { return true; } +}; +*/ + diff --git a/research/pipeline/PipelineManager.cpp b/research/pipeline/PipelineManager.cpp new file mode 100644 index 0000000..e003559 --- /dev/null +++ b/research/pipeline/PipelineManager.cpp @@ -0,0 +1,162 @@ + +class PipelineManager : public Thread { +public: + PipelineManager(); + void addSource( Format frameType ); + void addDestination( Format frameType ); + void clearTargets(); + void connectTogether(Module *m1, Module *m2, 
const Frame &f); + void makeConnections(Module *start); + void start( Frame *frame ) { Thread::start( (void *)frame ); } + void execute( void *p ); + void unconnectedRoute( Module *m, const Frame &f ); +private: + std::list sourceModules; + std::list destinationModules; + std::list source; + std::list destination; +}; + + +PipelineManager *pipelineMgr = 0; + + +PipelineManager::PipelineManager() +{ +} + +/* +void PipelineManager::newModule( Module *m ) +{ + printf("adding module: %s\n", m->name() ); + + allModules.push_front( m ); + + // update source modules list + for ( list::iterator it = source.begin(); it != source.end(); ++it ) { + if ( (*it) == m->inputFormat() ) { + sourceModules.push_front( m ); + // Just add it once + break; + } + } + + // update destination modules list + for ( list::iterator it = destination.begin(); it != destination.end(); ++it ) { + if ( (*it) == m->outputFormat() ) { + destinationModules.push_front( m ); + // Just add it once + break; + } + } +} +*/ + +void PipelineManager::addSource( Format frameType ) +{ + // update source modules list + Module *m = moduleMapper()->findModuleWithInputFormat( frameType ); + if ( m ) { + printf("adding source: %s\n", (const char *)frameType ); + source.push_front( frameType ); + sourceModules.push_front( m ); + } else { + printf("No source for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::addDestination( Format frameType ) +{ + Module *m = moduleMapper()->findModuleWithOutputFormat( frameType ); + if ( m ) { + printf("adding destination: %s\n", (const char *)frameType ); + destination.push_front( frameType ); + destinationModules.push_front( m ); + } else { + printf("No destination for %s found!!!\n", (const char *)frameType ); + } +} + +void PipelineManager::clearTargets() +{ + sourceModules.clear(); + destinationModules.clear(); + source.clear(); + destination.clear(); +} + +void PipelineManager::connectTogether( Module *m1, Module *m2, const Frame &f ) +{ +/* + 
//printf(" [%s] -> [%s] %s", m1->outputFormat(), m2->inputFormat(), m2->name() ); + printf(" -> %s", m2->name() ); + + staticDispatch( m2, Init, 0 ); + + if ( m2->isBlocking() || m1->isBlocking() ) { + ThreadBoundryModule *threadModule = new ThreadBoundryModule( 32, m2->inputFormat() ); + threadModule->init(); + m1->connectTo( threadModule, f ); + threadModule->connectTo( m2, f ); + } else { + m1->connectTo( m2, f ); + } +*/ +} + +/* + Connects together module with a module that can process the frame + and then gets the module to process this first frame +*/ +void PipelineManager::unconnectedRoute( Module *m, const Frame &f ) +{ + Module *m2 = moduleMapper()->findModuleWithInputFormat( f.id() ); + if ( m2 ) { + //connectTogether( m, m2, f ); + printf("Connecting together: %s -> %s\n", m->name(), m2->name() ); + staticDispatch( m2, Init, 0 ); + m->connectTo( m2, f ); + m2->connectedFrom( m, f ); + staticDispatch( m2, Process, &f ); + } else { + printf("Didn't find route for %s\n", m->name()); + } +} + +void PipelineManager::makeConnections( Module *start ) +{ +/* + printf("making connections:\n"); + + Frame frame( "UNKNOWN", 0 ); + Module *currentModule = start; + Format dstFmt = destination.front(); + + dispatch( currentModule, Init, 0 ); + printf(" %s (pid: %i)", currentModule->name(), getpid() ); + + while ( currentModule->outputFormat() != dstFmt ) { + Module *m = moduleMapper()->findModuleWithInputFormat( currentModule->outputFormat() ); + if ( m ) { + connectTogether( currentModule, m, frame ); + currentModule = m; + } else { + break; + } + } + printf("\n"); +*/ +} + + +void PipelineManager::execute( void *d ) +{ + printf("starting...\n"); + for ( list::iterator it = sourceModules.begin(); it != sourceModules.end(); ++it ) { + //makeConnections( (*it) ); + staticDispatch( (*it), Init, 0 ); + staticDispatch( (*it), Process, d ); + } +} + + diff --git a/research/pipeline/README.md b/research/pipeline/README.md new file mode 100644 index 0000000..8df026f --- 
/dev/null +++ b/research/pipeline/README.md @@ -0,0 +1,30 @@ + + +Example sources to support: + +file:/home/user/Documents/images/jpeg/picture.jpg +file:/home/user/Documents/audio/mpeg/greatestsong.mp3 +file:/home/user/Documents/application/playlist/favourites.pls +file:/home/user/Documents/application/playlist/favourites.mpu +http://www.slashdot.org/somefile.mpg +http://www.streaming_radio_server.net:9000 +http://www.streaming_tv_server.net:9000 +camera +microphone +camera & microphone + + +Example outputs to support: + +File/URL +UDP packets +TCP/IP packets +OSS +Alsa +QSS +Visualiser +QDirectPainter +QPainter +XShm +DirectDraw +YUV acceleration diff --git a/research/pipeline/Types/Deadcode.cpp b/research/pipeline/Types/Deadcode.cpp new file mode 100644 index 0000000..d08e52a --- /dev/null +++ b/research/pipeline/Types/Deadcode.cpp @@ -0,0 +1,140 @@ + + +#if 0 + +1 = registerNewFormat("AAC", ".aac", "An AAC decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MP3 decoder", AUDIO_CODEC); +2 = registerNewFormat("MP3", ".mp3", "MAD decoder", AUDIO_CODEC); +1 = registerNewFormat("AAC", ".aac", "My AAC decoder", AUDIO_CODEC); +3 = registerNewFormat("3DS", ".3ds", "3D Studio File", AUDIO_CODEC); + +enum FormatType { + FRAME_ID_FILE_PROTO, + FRAME_ID_HTTP_PROTO, + FRAME_ID_RTSP_PROTO, + FRAME_ID_RTP_PROTO, + FRAME_ID_MMS_PROTO, + + FRAME_ID_GIF_FORMAT, + FRAME_ID_JPG_FORMAT, + FRAME_ID_PNG_FORMAT, + + FRAME_ID_MP3_FORMAT, + FRAME_ID_WAV_FORMAT, + FRAME_ID_GSM_FORMAT, + FRAME_ID_AMR_FORMAT, + + FRAME_ID_MPG_FORMAT, + FRAME_ID_AVI_FORMAT, + FRAME_ID_MP4_FORMAT, + FRAME_ID_MOV_FORMAT, + + FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG_AUDIO_PACKET = FRAME_ID_FIRST_PACKET_TYPE, + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET, + FRAME_ID_GSM_AUDIO_PACKET, + FRAME_ID_AMR_AUDIO_PACKET, + FRAME_ID_AAC_AUDIO_PACKET, + FRAME_ID_LAST_PACKET_TYPE = FRAME_ID_AMR_AUDIO_PACKET, + + FRAME_ID_VIDEO_PACKET, + 
FRAME_ID_AUDIO_PACKET, + + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME, + + FRAME_ID_PCM_AUDIO_DATA, + + FRAME_ID_RENDERED_AUDIO, + FRAME_ID_RENDERED_VIDEO, + + FRAME_ID_URL_SOURCE, + FRAME_ID_AUDIO_SOURCE, + FRAME_ID_VIDEO_SOURCE, + + FRAME_ID_MULTIPLE_FORMAT, + FRAME_ID_ANY_ONE_OF_FORMAT, + + FRAME_ID_MULTIPLE_PACKET, + FRAME_ID_ANY_ONE_OF_PACKET, + + FRAME_ID_UNKNOWN +}; + +typedef struct FRAME_GENERIC { +/* + int generalId; + int specificId; + int streamId; +*/ + int bytes; + char* bits; + int pts; +}; + +enum videoCodecId { + FRAME_ID_MPEG1_VIDEO_PACKET, + FRAME_ID_MPEG2_VIDEO_PACKET, + FRAME_ID_MPEG4_VIDEO_PACKET, + FRAME_ID_QT_VIDEO_PACKET +}; + +typedef struct FRAME_VIDEO_PACKET { + int codecId; + int bytes; + char* bits; +}; + +enum videoFrameFormat { + FRAME_ID_YUV420_VIDEO_FRAME, + FRAME_ID_YUV422_VIDEO_FRAME, + FRAME_ID_RGB16_VIDEO_FRAME, + FRAME_ID_RGB24_VIDEO_FRAME, + FRAME_ID_RGB32_VIDEO_FRAME +}; + +typedef struct FRAME_VIDEO_FRAME { + int format; + int width; + int height; + int bytes; + char* bits; +}; + +struct UpPCMPacket { + int freq; + int bitsPerSample; + int size; + char data[1]; +}; + +struct DownPCMPacket { + +}; + +#endif + + + +/* +struct StreamPacket { + void *private; // AVPacket *pkt; + int streamId; + int size; + char *data; +}; +*/ + +/* +struct StreamPacket { + int streamId; + Frame frame; +}; +*/ + diff --git a/research/pipeline/Types/Format.hpp b/research/pipeline/Types/Format.hpp new file mode 100644 index 0000000..72642b6 --- /dev/null +++ b/research/pipeline/Types/Format.hpp @@ -0,0 +1,29 @@ +#pragma once +#include + +// Format +class Format +{ +public: + Format() : s(nullptr) { } + Format(const Format &other) : s( other.s ) { } + Format(const char *str) : s( str ) { } + bool operator==(const Format& other) + { + return !std::strcmp(other.s, s); + } + operator const char *() + { + return s; + } + bool operator==(const char 
*other) + { + return !std::strcmp(s, other); + } + bool operator<(const Format& other) const + { + return std::strcmp(other.s, s) < 0; + } +private: + const char *s; +}; diff --git a/research/pipeline/Types/Frame.hpp b/research/pipeline/Types/Frame.hpp new file mode 100644 index 0000000..35ddb08 --- /dev/null +++ b/research/pipeline/Types/Frame.hpp @@ -0,0 +1,51 @@ +#pragma once +#include +#include "Format.hpp" + +// Frame +class Frame +{ +public: + Frame() { } + + Frame( const char* id, void* data ) + : counter( 0 ) + , type( id ) + , bits( data ) + { + pthread_mutex_init( &mutex, NULL ); + } + + void ref() const + { + pthread_mutex_lock( &mutex ); + ++counter; + pthread_mutex_unlock( &mutex ); + } + + void deref() const + { + pthread_mutex_lock( &mutex ); + --counter; + pthread_mutex_unlock( &mutex ); + } + + int refcount() const + { + int ret; + pthread_mutex_lock( &mutex ); + ret = counter; + pthread_mutex_unlock( &mutex ); + return ret; + } + + Format id() const { return type; } + void* data() const { return bits; } + +private: + mutable pthread_mutex_t mutex; + mutable int counter; + Format type; + void *bits; +}; + diff --git a/research/pipeline/Types/Module.hpp b/research/pipeline/Types/Module.hpp new file mode 100644 index 0000000..f0ad0fc --- /dev/null +++ b/research/pipeline/Types/Module.hpp @@ -0,0 +1,118 @@ +#pragma once +#include +#include +#include +#include "Frame.hpp" +#include "Format.hpp" + +class Module; + +enum Commands { Init, Pull, Deref, Process, Simulate, ConnectToModule, ConnectedFrom }; + +typedef Module *Address; + +struct Command { + Address address; + Commands command; + const void *arg; +}; + +// CommandQueue +class CommandQueue { +public: + CommandQueue( int size ); + + void add( const Command & ); + const Command &remove(); + +private: + int max; + const Command **commands; + int in, out; + + pthread_mutex_t mutex; + sem_t free; + sem_t used; +}; + +CommandQueue::CommandQueue( int size ) + : max( size ), in( 0 ), out( 0 ) +{ + 
commands = new const Command*[max]; + pthread_mutex_init( &mutex, NULL ); + sem_init( &free, 0, max ); + sem_init( &used, 0, 0 ); +} + +void CommandQueue::add( const Command &command ) +{ + while( sem_wait( &free ) != 0 ); + pthread_mutex_lock( &mutex ); + + commands[in] = &command; + in = ( in + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &used ); +} + +const Command &CommandQueue::remove() +{ + while( sem_wait( &used ) != 0 ); + pthread_mutex_lock( &mutex ); + + const Command *command = commands[out]; + out = ( out + 1 ) % max; + + pthread_mutex_unlock( &mutex ); + sem_post( &free ); + + return *command; +} + + + +class ModuleFactory { +public: + ModuleFactory() { } + + virtual const char *name() = 0; + + virtual std::list
threadAffinity() = 0; + virtual bool isBlocking() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; + virtual bool supportsInputFormat( Format ) = 0; + virtual bool supportsOutputFormat( Format ) = 0; + + virtual Module *createInstance() = 0; +}; + + + +// Modules +class Module { +public: + Module() { } + + virtual const char *name() = 0; + virtual Format inputFormat() = 0; + virtual Format outputFormat() = 0; +// virtual bool constFrameProcessing() = 0; + +// virtual bool supportsInputType( Format ) = 0; + virtual bool supportsOutputType( Format ) = 0; + +// virtual list inputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } +// virtual list outputFormats() { list t; t.push_back(FRAME_ID_UNKNOWN); return t; } + + virtual bool isBlocking() = 0;//{ return false; } + virtual std::list
threadAffinity() = 0; + +// virtual void command( Command command, const void *arg, bool priorityFlag ) = 0; + virtual void command( Commands command, const void *arg ) = 0; + + virtual void connectTo( Module *next, const Frame &f ) = 0; + virtual void connectedFrom( Module *next, const Frame &f ) = 0; +}; + diff --git a/research/pipeline/Types/PCMData.hpp b/research/pipeline/Types/PCMData.hpp new file mode 100644 index 0000000..57de038 --- /dev/null +++ b/research/pipeline/Types/PCMData.hpp @@ -0,0 +1,7 @@ +#pragma once + +struct PCMData +{ + int size; + char data[65536]; +}; diff --git a/research/pipeline/Types/Thread.hpp b/research/pipeline/Types/Thread.hpp new file mode 100644 index 0000000..d7922a2 --- /dev/null +++ b/research/pipeline/Types/Thread.hpp @@ -0,0 +1,41 @@ +#pragma once + +// Utils +class Thread { +public: + Thread(); + int start( void* arg ); + +protected: + int run( void* arg ); + static void* entryPoint( void* ); + virtual void setup() { }; + virtual void execute( void* ) = 0; + void* arg() const { return arg_; } + void setArg( void* a ) { arg_ = a; } + +private: + pthread_t tid_; + void* arg_; +}; + +Thread::Thread() {} + +int Thread::start( void* arg ) +{ + setArg(arg); + return pthread_create( &tid_, 0, Thread::entryPoint, this ); +} + +int Thread::run( void* arg ) +{ + printf(" (pid: %i)", getpid() ); + setup(); + execute( arg ); +} + +void* Thread::entryPoint( void* pthis ) +{ + Thread* pt = (Thread*)pthis; + pt->run( pt->arg() ); +} diff --git a/research/pipeline/Types/YUVFrame.hpp b/research/pipeline/Types/YUVFrame.hpp new file mode 100644 index 0000000..109f9a4 --- /dev/null +++ b/research/pipeline/Types/YUVFrame.hpp @@ -0,0 +1,16 @@ +#pragma once +#include "libavcodec/avcodec.h" + + +struct YUVFrame { + int width; + int height; + enum AVPixelFormat fmt; + AVFrame *pic; +/* + uchar *y; + uchar *u; + uchar *v; + int scanlineWidth[3]; +*/ +}; diff --git a/research/pipeline/prototype.cpp b/research/pipeline/prototype.cpp new file mode 
100755 index 0000000..a0c03f9 --- /dev/null +++ b/research/pipeline/prototype.cpp @@ -0,0 +1,172 @@ +/* + + Project Carmack 0.01 (AKA Media Library Prototype 01/02) + Copyright John Ryland, 2005 + +*/ + +using namespace std; + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include "Types/Frame.hpp" +#include "Types/Thread.hpp" +#include "Types/Module.hpp" + + +#define WIDTH 160 +#define HEIGHT 120 + + + + +/* +class ModulesThread : public Thread, public DispatchInterface { +public: + void execute( void* ) + { + for (;;) { + CommandStruct *command = buffer.remove(); + command->module->command( command->command, command->arg ); + } + } + + void dispatch( CommandStruct *command ) + { + buffer.add( command ); + } + +private: + CommandQueue buffer; +}; +*/ + + + + +static void staticDispatch( Address address, Commands command, const void *arg ) +{ + moduleMapper()->dispatchCommand( address, command, arg ); +} + + + + +struct FFMpegStreamPacket { + AVPacket *packet; +}; + + + + + +void ProcessMessages(); + + + + + + + +Module *a, *b, *c, *d; + + +void registerModules() +{ + moduleMapper()->addModule( new OSSRenderer ); +// moduleMapper()->addModule( d = new YUVRenderer ); + moduleMapper()->addModule( d = new DirectDrawRenderer ); + moduleMapper()->addModule( new MP3DecodeModule ); +// moduleMapper()->addModule( new FFMpegMuxModule ); + moduleMapper()->addModule( new MpegDecodeModule ); +// moduleMapper()->addModule( new MP3SourceModule ); +// moduleMapper()->addModule( new StreamDemuxModule ); + moduleMapper()->addModule( c = new MpegEncodeModule ); +// moduleMapper()->addModule( b = new Splitter ); + moduleMapper()->addModule( new FFMpegSourceModule ); +// moduleMapper()->addModule( a = new VideoCameraSourceModule ); +} + +void playFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( 
"FRAME_ID_RENDERED_AUDIO" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + //pipelineMgr->start( &file ); + pipelineMgr->execute( &file ); +} + + +void displayCamera() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +void reEncodeFile( const char *filename ) +{ + pipelineMgr->addSource( "FRAME_ID_URL_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + + int length = strlen(filename) + 1; + Frame file( "FRAME_ID_URL_SOURCE", memcpy(new char[length], filename, length) ); + file.ref(); + + pipelineMgr->start( &file ); +} + +void recordVideo() +{ + pipelineMgr->addSource( "FRAME_ID_VIDEO_CAMERA_SOURCE" ); + pipelineMgr->addDestination( "FRAME_ID_URL_SINK" ); + pipelineMgr->addDestination( "FRAME_ID_RENDERED_VIDEO" ); + pipelineMgr->start( new Frame( "FRAME_ID_VIDEO_CAMERA_SOURCE", 0 ) ); +} + +int main( int argc, char** argv ) +{ + registerModules(); + pipelineMgr = new PipelineManager; +/* + Frame f; + printf("Connecting together: %s -> %s\n", a->name(), b->name() ); + staticDispatch( b, Init, 0 ); + a->connectTo( b, f ); +// b->connectedFrom( a, f ); + + printf("Connecting together: %s -> %s\n", b->name(), c->name() ); + staticDispatch( c, Init, 0 ); + b->connectTo( c, f ); + + printf("Connecting together: %s -> %s\n", b->name(), d->name() ); + staticDispatch( d, Init, 0 ); + b->connectTo( d, f ); +*/ + playFile( (argc > 1) ? argv[1] : "test.mpg" ); + //reEncodeFile( (argc > 1) ? 
argv[1] : "test.mpg" ); + //displayCamera(); + //recordVideo(); +} + diff --git a/research/string-tables/.gitignore b/research/string-tables/.gitignore new file mode 100644 index 0000000..7c6ad91 --- /dev/null +++ b/research/string-tables/.gitignore @@ -0,0 +1,61 @@ +build/cmake_install.cmake +build/CMakeCache.txt +build/compile_commands.json +build/FixedStrings.inl +build/libProgram.a +build/libStringsTable.a +build/Makefile +build/StringsTableTest +build/CMakeFiles/cmake.check_cache +build/CMakeFiles/CMakeDirectoryInformation.cmake +build/CMakeFiles/CMakeOutput.log +build/CMakeFiles/CMakeRuleHashes.txt +build/CMakeFiles/feature_tests.bin +build/CMakeFiles/feature_tests.c +build/CMakeFiles/feature_tests.cxx +build/CMakeFiles/Makefile.cmake +build/CMakeFiles/Makefile2 +build/CMakeFiles/progress.marks +build/CMakeFiles/TargetDirectories.txt +build/CMakeFiles/3.5.1/CMakeCCompiler.cmake +build/CMakeFiles/3.5.1/CMakeCXXCompiler.cmake +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_C.bin +build/CMakeFiles/3.5.1/CMakeDetermineCompilerABI_CXX.bin +build/CMakeFiles/3.5.1/CMakeSystem.cmake +build/CMakeFiles/3.5.1/CompilerIdC/a.out +build/CMakeFiles/3.5.1/CompilerIdC/CMakeCCompilerId.c +build/CMakeFiles/3.5.1/CompilerIdCXX/a.out +build/CMakeFiles/3.5.1/CompilerIdCXX/CMakeCXXCompilerId.cpp +build/CMakeFiles/Program.dir/build.make +build/CMakeFiles/Program.dir/cmake_clean_target.cmake +build/CMakeFiles/Program.dir/cmake_clean.cmake +build/CMakeFiles/Program.dir/CXX.includecache +build/CMakeFiles/Program.dir/depend.internal +build/CMakeFiles/Program.dir/depend.make +build/CMakeFiles/Program.dir/DependInfo.cmake +build/CMakeFiles/Program.dir/flags.make +build/CMakeFiles/Program.dir/link.txt +build/CMakeFiles/Program.dir/program.cpp.o +build/CMakeFiles/Program.dir/progress.make +build/CMakeFiles/StringsTable.dir/build.make +build/CMakeFiles/StringsTable.dir/cmake_clean_target.cmake +build/CMakeFiles/StringsTable.dir/cmake_clean.cmake 
+build/CMakeFiles/StringsTable.dir/CXX.includecache +build/CMakeFiles/StringsTable.dir/depend.internal +build/CMakeFiles/StringsTable.dir/depend.make +build/CMakeFiles/StringsTable.dir/DependInfo.cmake +build/CMakeFiles/StringsTable.dir/FixedStrings.cpp.o +build/CMakeFiles/StringsTable.dir/flags.make +build/CMakeFiles/StringsTable.dir/link.txt +build/CMakeFiles/StringsTable.dir/progress.make +build/CMakeFiles/StringsTableTest.dir/build.make +build/CMakeFiles/StringsTableTest.dir/cmake_clean.cmake +build/CMakeFiles/StringsTableTest.dir/CXX.includecache +build/CMakeFiles/StringsTableTest.dir/depend.internal +build/CMakeFiles/StringsTableTest.dir/depend.make +build/CMakeFiles/StringsTableTest.dir/DependInfo.cmake +build/CMakeFiles/StringsTableTest.dir/flags.make +build/CMakeFiles/StringsTableTest.dir/link.txt +build/CMakeFiles/StringsTableTest.dir/main.cpp.o +build/CMakeFiles/StringsTableTest.dir/progress.make +README.pdf diff --git a/research/string-tables/.vscode/Code.code-workspace b/research/string-tables/.vscode/Code.code-workspace new file mode 100644 index 0000000..c7e938e --- /dev/null +++ b/research/string-tables/.vscode/Code.code-workspace @@ -0,0 +1,49 @@ +{ + "folders": [ + { + "path": ".." 
+ }, + { + "path": "../../framework" + } + ], + "settings": { + "files.associations": { + "*.tpp": "cpp", + "functional": "cpp", + "optional": "cpp", + "array": "cpp", + "*.tcc": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstdint": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "deque": "cpp", + "unordered_map": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "ostream": "cpp", + "numeric": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "utility": "cpp", + "typeinfo": "cpp" + } + } +} \ No newline at end of file diff --git a/research/string-tables/.vscode/c_cpp_properties.json b/research/string-tables/.vscode/c_cpp_properties.json new file mode 100644 index 0000000..1b72752 --- /dev/null +++ b/research/string-tables/.vscode/c_cpp_properties.json @@ -0,0 +1,42 @@ +{ + "configurations": [ + { + "name": "Linux", + "includePath": [ + "${workspaceFolder}/**", + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + }, + "defines": [], + "compilerPath": "/usr/bin/clang", + "cStandard": "c11", + "cppStandard": "c++14", + "intelliSenseMode": "gcc-x64", + "compileCommands": "${workspaceFolder}/build/compile_commands.json" + }, + { + "name": "Mac", + "includePath": [ + "/usr/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + }, + { + "name": "Win32", + "includePath": [ + "c:/Program Files (x86)/Microsoft Visual Studio 14.0/VC/include" + ], + "browse": { + "limitSymbolsToIncludedHeaders": true, + "databaseFilename": "" + } + } + ], + "version": 4 +} \ No newline at end of file diff --git a/research/string-tables/.vscode/launch.json 
b/research/string-tables/.vscode/launch.json new file mode 100644 index 0000000..ff5abd3 --- /dev/null +++ b/research/string-tables/.vscode/launch.json @@ -0,0 +1,32 @@ +{ + // Use IntelliSense to learn about possible attributes. + // Hover to view descriptions of existing attributes. + // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 + "version": "0.2.0", + "configurations": [ + { + "name": "(gdb) Launch", + "type": "cppdbg", + "request": "launch", + "program": "${workspaceFolder}/build/StringsTableTest", + "args": [], + "stopAtEntry": false, + "cwd": "${workspaceFolder}", + "environment": [ + { + "name": "LD_LIBRARY_PATH", + "value": "" + } + ], + "externalConsole": false, + "MIMode": "gdb", + "setupCommands": [ + { + "description": "Enable pretty-printing for gdb", + "text": "-enable-pretty-printing", + "ignoreFailures": true + } + ] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/.vscode/tasks.json b/research/string-tables/.vscode/tasks.json new file mode 100644 index 0000000..64a18e6 --- /dev/null +++ b/research/string-tables/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "Build C++ project", + "type": "shell", + "group": "build", + "command": "cd ./build && make", + "problemMatcher": [] + }, + { + "label": "Build & run C++ project", + "type": "shell", + "group": { + "kind": "build", + "isDefault": true + }, + "command": "cd ./build && make && ./StringsTableTest", + "problemMatcher": [] + }, + { + "label": "Build CMake", + "type": "shell", + "group": "build", + "command": "cd ./build && cmake build .." 
+ }, + { + "label": "Compile Markdown", + "type": "shell", + "args": [], + "command": "${command:extension.markdown-pdf: Export (PDF)}", + + "command2": "markdown-it README.md -o README.html", + "presentation": { + "reveal": "never", + "panel": "shared", + }, + "problemMatcher": [] + } + ] +} \ No newline at end of file diff --git a/research/string-tables/CMakeLists.txt b/research/string-tables/CMakeLists.txt new file mode 100644 index 0000000..4faf650 --- /dev/null +++ b/research/string-tables/CMakeLists.txt @@ -0,0 +1,28 @@ +cmake_minimum_required(VERSION 3.5.0) + +# set the project name and version +project(StringsTableTest VERSION 1.0) + +# specify the C++ standard +set(CMAKE_CXX_STANDARD 14) +set(CMAKE_CXX_STANDARD_REQUIRED True) + +# add the Program library +add_library(Program program.cpp) + +# add the StringsTable library +add_library(StringsTable FixedStrings.cpp FixedStrings.inl) +target_include_directories(StringsTable PUBLIC build) + +# add the executable +add_executable(StringsTableTest main.cpp) +target_link_libraries(StringsTableTest PUBLIC Program StringsTable) + +# add generator to create the strings table +add_custom_command( + OUTPUT ${CMAKE_CURRENT_SOURCE_DIR}/FixedStrings.inl + COMMAND ${CMAKE_CXX_COMPILER} ../main.cpp $ -o dummy 2>&1 + | sed -n 's@.\*undefined.\*cFixedStringId_\\\([[:alnum:]_]\*\\\).\*@DEFINE_FIXED_STRING\(\\1\)@p' + | sort | uniq > FixedStrings.inl + DEPENDS Program +) diff --git a/research/string-tables/FixedStrings.cpp b/research/string-tables/FixedStrings.cpp new file mode 100644 index 0000000..8adc729 --- /dev/null +++ b/research/string-tables/FixedStrings.cpp @@ -0,0 +1,33 @@ +#include "FixedStrings.h" + + +#define DEFINE_FIXED_STRING(x) enumStringIdValue_##x, +enum StringIdsEnum +{ +#include "FixedStrings.inl" +enumStringId_Count +}; +#undef DEFINE_FIXED_STRING + + +#define DEFINE_FIXED_STRING(x) DECLARE_FIXED_STRING(x) = enumStringIdValue_##x; +#include "FixedStrings.inl" +#undef DEFINE_FIXED_STRING + + +#define 
DEFINE_FIXED_STRING(x) case enumStringIdValue_##x: return #x; +const char* FixedStringFromId(int aStringId) +{ + switch (aStringId) + { +#include "FixedStrings.inl" + } + return "null"; +} +#undef DEFINE_FIXED_STRING + + +int StringTableSize() +{ + return enumStringId_Count; +} diff --git a/research/string-tables/FixedStrings.h b/research/string-tables/FixedStrings.h new file mode 100644 index 0000000..7b86833 --- /dev/null +++ b/research/string-tables/FixedStrings.h @@ -0,0 +1,18 @@ +#pragma once +#ifndef FIXED_STRINGS_H +#define FIXED_STRINGS_H + +// A 'fixed-string' is a compile time string which is +// stored in the read only section of the executable +// and is available as an int. The mapping is fixed +// at compile time. The retrieval of the static string +// that the id maps to is thread safe. + +#define FIXED_STRING_ID(x) cFixedStringId_##x +#define DECLARE_FIXED_STRING(x) extern const int FIXED_STRING_ID(x) + +// Thread-safe +extern const char* FixedStringFromId(int aFixedStringId); +extern int StringTableSize(); + +#endif // FIXED_STRINGS_H diff --git a/research/string-tables/README.md b/research/string-tables/README.md new file mode 100644 index 0000000..b188761 --- /dev/null +++ b/research/string-tables/README.md @@ -0,0 +1,18 @@ + +StringsTableTest +---------------- + +A _fixed-string_ is a compile time string which is +stored in the read only section of the executable +and is available as an int. The mapping is fixed +at compile time. The retrieval of the static string +that the id maps to is thread safe. + +This test shows how this can be integrated with +cmake to be able to at compile time find all the +strings and place them in the strings table. + +This could be useful for a fixed size union that +contains various types as well as string ids which +refer to fixed strings. 
+ diff --git a/research/string-tables/main.cpp b/research/string-tables/main.cpp new file mode 100644 index 0000000..c8f6727 --- /dev/null +++ b/research/string-tables/main.cpp @@ -0,0 +1,7 @@ +extern void program(); + +int main(int argc, char* argv[]) +{ + program(); + return 0; +} diff --git a/research/string-tables/program.cpp b/research/string-tables/program.cpp new file mode 100644 index 0000000..4076c6c --- /dev/null +++ b/research/string-tables/program.cpp @@ -0,0 +1,22 @@ +#include +#include "FixedStrings.h" + +void program() +{ + DECLARE_FIXED_STRING(blah); + DECLARE_FIXED_STRING(foo); + DECLARE_FIXED_STRING(bar); + DECLARE_FIXED_STRING(bear); + int strId1 = FIXED_STRING_ID(blah); + int strId2 = FIXED_STRING_ID(foo); + int strId3 = FIXED_STRING_ID(bar); + int strId4 = FIXED_STRING_ID(bear); + const char* stringFromId1 = FixedStringFromId(strId1); + printf("String is: -%d=%s- -%d=%s- -%d=%s-\n", strId1, stringFromId1, strId2, FixedStringFromId(strId2), strId3, FixedStringFromId(strId3)); + + printf("Contents of StringTable:\n"); + for (int i = 0; i < StringTableSize(); ++i) + { + printf(" [%d] = %s\n", i, FixedStringFromId(i)); + } +}