threading, optimizations, api additions, directshow features
parent c108d1d506
commit 42c6b401e2
@@ -383,8 +383,78 @@ struct FPSInfo
};

bool GetClosestResolution(List<MediaOutputInfo> &outputList, SIZE &resolution, UINT &fps)
{
    LONG width, height;
    UINT internalFPS = API->GetMaxFPS();
    API->GetBaseSize((UINT&)width, (UINT&)height);

    LONG bestDistance = 0x7FFFFFFF;
    SIZE bestSize;
    UINT minFPS = 0;
    UINT bestFPS = 0;

    for(UINT i=0; i<outputList.Num(); i++)
    {
        MediaOutputInfo &outputInfo = outputList[i];

        LONG outputWidth = outputInfo.minCX;
        do
        {
            LONG distWidth = width-outputWidth;
            if(distWidth < 0)
                break;

            if(distWidth > bestDistance)
            {
                outputWidth += outputInfo.xGranularity;
                continue;
            }

            LONG outputHeight = outputInfo.minCY;
            do
            {
                LONG distHeight = height-outputHeight;
                if(distHeight < 0)
                    break;

                LONG totalDist = distHeight+distWidth;
                if((totalDist <= bestDistance) || (totalDist == bestDistance && outputInfo.maxFPS > bestFPS))
                {
                    bestDistance = totalDist;
                    bestSize.cx = outputWidth;
                    bestSize.cy = outputHeight;
                    minFPS = (UINT)outputInfo.minFPS;
                    bestFPS = (UINT)outputInfo.maxFPS;
                }

                outputHeight += outputInfo.yGranularity;
            }while((UINT)outputHeight <= outputInfo.maxCY);

            outputWidth += outputInfo.xGranularity;
        }while((UINT)outputWidth <= outputInfo.maxCX);
    }

    if(bestDistance != 0x7FFFFFFF)
    {
        resolution.cx = bestSize.cx;
        resolution.cy = bestSize.cy;

        if(internalFPS < minFPS)
            fps = minFPS;
        else if(internalFPS > bestFPS)
            fps = bestFPS;
        else
            fps = internalFPS;
        return true;
    }

    return false;
}
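For context, a minimal usage sketch (illustrative only, not part of the commit): GetClosestResolution walks each output mode's supported size range in granularity steps, keeps the size closest to the base scene size without exceeding it, then clamps the scene FPS into that mode's [minFPS, maxFPS] range. A caller that already built an output list with GetOutputList might use it like this:

    // Illustrative sketch -- mirrors how LoadFilters() below consumes the result.
    SIZE size;
    UINT fps;
    if(GetClosestResolution(outputList, size, fps))
    {
        renderCX = size.cx;   // chosen capture width
        renderCY = size.cy;   // chosen capture height
        // fps has already been clamped to what the chosen device mode supports
    }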

struct ConfigDialogData
{
    CTSTR lpName;
    XElement *data;
    List<MediaOutputInfo> outputList;
    List<SIZE> resolutions;
@@ -522,13 +592,20 @@ INT_PTR CALLBACK ConfigureDialogProc(HWND hwnd, UINT message, WPARAM wParam, LPA
            HWND hwndFPS = GetDlgItem(hwnd, IDC_FPS);
            HWND hwndFlip = GetDlgItem(hwnd, IDC_FLIPIMAGE);

            //------------------------------------------

            bool bFlipVertical = configData->data->GetInt(TEXT("flipImage")) != 0;
            SendMessage(hwndFlip, BM_SETCHECK, bFlipVertical ? BST_CHECKED : BST_UNCHECKED, 0);

            //------------------------------------------

            String strDevice = configData->data->GetString(TEXT("device"));
            UINT cx = configData->data->GetInt(TEXT("resolutionWidth"));
            UINT cy = configData->data->GetInt(TEXT("resolutionHeight"));
            UINT fps = configData->data->GetInt(TEXT("fps"));
            bool bFlipVertical = configData->data->GetInt(TEXT("flipImage")) != 0;

            SendMessage(hwndFlip, BM_SETCHECK, bFlipVertical ? BST_CHECKED : BST_UNCHECKED, 0);
            BOOL bCustomResolution = configData->data->GetInt(TEXT("customResolution"));
            SendMessage(GetDlgItem(hwnd, IDC_CUSTOMRESOLUTION), BM_SETCHECK, bCustomResolution ? BST_CHECKED : BST_UNCHECKED, 0);

            LocalizeWindow(hwnd, pluginLocale);
            FillOutListOfVideoDevices(GetDlgItem(hwnd, IDC_DEVICELIST));
@@ -547,21 +624,147 @@ INT_PTR CALLBACK ConfigureDialogProc(HWND hwnd, UINT message, WPARAM wParam, LPA
                SendMessage(hwndDeviceList, CB_SETCURSEL, deviceID, 0);
                ConfigureDialogProc(hwnd, WM_COMMAND, MAKEWPARAM(IDC_DEVICELIST, CBN_SELCHANGE), (LPARAM)hwndDeviceList);

                String strResolution;
                strResolution << UIntString(cx) << TEXT("x") << UIntString(cy);
                if(bCustomResolution)
                {
                    String strResolution;
                    strResolution << UIntString(cx) << TEXT("x") << UIntString(cy);

                    SendMessage(hwndResolutionList, WM_SETTEXT, 0, (LPARAM)strResolution.Array());
                    ConfigureDialogProc(hwnd, WM_COMMAND, MAKEWPARAM(IDC_RESOLUTION, CBN_EDITCHANGE), (LPARAM)hwndResolutionList);
                SendMessage(hwndResolutionList, WM_SETTEXT, 0, (LPARAM)strResolution.Array());
                ConfigureDialogProc(hwnd, WM_COMMAND, MAKEWPARAM(IDC_RESOLUTION, CBN_EDITCHANGE), (LPARAM)hwndResolutionList);

                    SendMessage(hwndFPS, UDM_SETPOS32, 0, (LPARAM)fps);
                SendMessage(hwndFPS, UDM_SETPOS32, 0, (LPARAM)fps);
                }
            }

            break;
            ConfigureDialogProc(hwnd, WM_COMMAND, MAKEWPARAM(IDC_CUSTOMRESOLUTION, BN_CLICKED), (LPARAM)GetDlgItem(hwnd, IDC_CUSTOMRESOLUTION));

            //------------------------------------------

            BOOL bUseColorKey = configData->data->GetInt(TEXT("useColorKey"), 0);
            DWORD keyColor = configData->data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
            UINT similarity = configData->data->GetInt(TEXT("keySimilarity"), 0);
            UINT blend = configData->data->GetInt(TEXT("keyBlend"), 10);
            UINT gamma = configData->data->GetInt(TEXT("keyGamma"), 0);

            SendMessage(GetDlgItem(hwnd, IDC_USECOLORKEY), BM_SETCHECK, bUseColorKey ? BST_CHECKED : BST_UNCHECKED, 0);
            CCSetColor(GetDlgItem(hwnd, IDC_COLOR), keyColor);

            SendMessage(GetDlgItem(hwnd, IDC_BASETHRESHOLD), UDM_SETRANGE32, 0, 100);
            SendMessage(GetDlgItem(hwnd, IDC_BASETHRESHOLD), UDM_SETPOS32, 0, similarity);

            SendMessage(GetDlgItem(hwnd, IDC_BLEND), UDM_SETRANGE32, 0, 100);
            SendMessage(GetDlgItem(hwnd, IDC_BLEND), UDM_SETPOS32, 0, blend);

            SendMessage(GetDlgItem(hwnd, IDC_GAMMA), UDM_SETRANGE32, -75, 75);
            SendMessage(GetDlgItem(hwnd, IDC_GAMMA), UDM_SETPOS32, 0, gamma);

            EnableWindow(GetDlgItem(hwnd, IDC_COLOR), bUseColorKey);
            EnableWindow(GetDlgItem(hwnd, IDC_SELECTCOLOR), bUseColorKey);
            EnableWindow(GetDlgItem(hwnd, IDC_BASETHRESHOLD_EDIT), bUseColorKey);
            EnableWindow(GetDlgItem(hwnd, IDC_BASETHRESHOLD), bUseColorKey);
            EnableWindow(GetDlgItem(hwnd, IDC_BLEND_EDIT), bUseColorKey);
            EnableWindow(GetDlgItem(hwnd, IDC_BLEND), bUseColorKey);
            EnableWindow(GetDlgItem(hwnd, IDC_GAMMA_EDIT), bUseColorKey);
            EnableWindow(GetDlgItem(hwnd, IDC_GAMMA), bUseColorKey);

            return TRUE;
        }

        case WM_COMMAND:
            switch(LOWORD(wParam))
            {
                case IDC_CUSTOMRESOLUTION:
                {
                    HWND hwndUseCustomResolution = (HWND)lParam;
                    BOOL bCustomResolution = SendMessage(hwndUseCustomResolution, BM_GETCHECK, 0, 0) == BST_CHECKED;

                    EnableWindow(GetDlgItem(hwnd, IDC_RESOLUTION), bCustomResolution);
                    EnableWindow(GetDlgItem(hwnd, IDC_FPS), bCustomResolution);
                    EnableWindow(GetDlgItem(hwnd, IDC_FPS_EDIT), bCustomResolution);
                    break;
                }

                case IDC_USECOLORKEY:
                {
                    HWND hwndUseColorKey = (HWND)lParam;
                    BOOL bUseColorKey = SendMessage(hwndUseColorKey, BM_GETCHECK, 0, 0) == BST_CHECKED;

                    ConfigDialogData *configData = (ConfigDialogData*)GetWindowLongPtr(hwnd, DWLP_USER);
                    ImageSource *source = API->GetSceneImageSource(configData->lpName);
                    if(source)
                        source->SetInt(TEXT("useColorKey"), bUseColorKey);

                    EnableWindow(GetDlgItem(hwnd, IDC_COLOR), bUseColorKey);
                    EnableWindow(GetDlgItem(hwnd, IDC_SELECTCOLOR), bUseColorKey);
                    EnableWindow(GetDlgItem(hwnd, IDC_BASETHRESHOLD_EDIT), bUseColorKey);
                    EnableWindow(GetDlgItem(hwnd, IDC_BASETHRESHOLD), bUseColorKey);
                    EnableWindow(GetDlgItem(hwnd, IDC_BLEND_EDIT), bUseColorKey);
                    EnableWindow(GetDlgItem(hwnd, IDC_BLEND), bUseColorKey);
                    EnableWindow(GetDlgItem(hwnd, IDC_GAMMA_EDIT), bUseColorKey);
                    EnableWindow(GetDlgItem(hwnd, IDC_GAMMA), bUseColorKey);
                    break;
                }

                case IDC_COLOR:
                {
                    ConfigDialogData *configData = (ConfigDialogData*)GetWindowLongPtr(hwnd, DWLP_USER);
                    ImageSource *source = API->GetSceneImageSource(configData->lpName);

                    if(source)
                    {
                        DWORD color = CCGetColor((HWND)lParam);
                        source->SetInt(TEXT("keyColor"), color);
                    }
                    break;
                }

                case IDC_FLIPIMAGE:
                    if(HIWORD(wParam) == BN_CLICKED)
                    {
                        ConfigDialogData *configData = (ConfigDialogData*)GetWindowLongPtr(hwnd, DWLP_USER);
                        ImageSource *source = API->GetSceneImageSource(configData->lpName);
                        if(source)
                        {
                            HWND hwndFlip = (HWND)lParam;
                            BOOL bFlipImage = SendMessage(hwndFlip, BM_GETCHECK, 0, 0) == BST_CHECKED;

                            source->SetInt(TEXT("flipImage"), bFlipImage);
                        }
                    }
                    break;

                case IDC_BASETHRESHOLD_EDIT:
                case IDC_BLEND_EDIT:
                case IDC_GAMMA_EDIT:
                    if(HIWORD(wParam) == EN_CHANGE)
                    {
                        ConfigDialogData *configData = (ConfigDialogData*)GetWindowLongPtr(hwnd, DWLP_USER);
                        if(configData)
                        {
                            ImageSource *source = API->GetSceneImageSource(configData->lpName);

                            if(source)
                            {
                                HWND hwndVal = NULL;
                                switch(LOWORD(wParam))
                                {
                                    case IDC_BASETHRESHOLD_EDIT: hwndVal = GetDlgItem(hwnd, IDC_BASETHRESHOLD); break;
                                    case IDC_BLEND_EDIT: hwndVal = GetDlgItem(hwnd, IDC_BLEND); break;
                                    case IDC_GAMMA_EDIT: hwndVal = GetDlgItem(hwnd, IDC_GAMMA); break;
                                }

                                int val = (int)SendMessage(hwndVal, UDM_GETPOS32, 0, 0);
                                switch(LOWORD(wParam))
                                {
                                    case IDC_BASETHRESHOLD_EDIT: source->SetInt(TEXT("keySimilarity"), val); break;
                                    case IDC_BLEND_EDIT: source->SetInt(TEXT("keyBlend"), val); break;
                                    case IDC_GAMMA_EDIT: source->SetInt(TEXT("keyGamma"), val); break;
                                }
                            }
                        }
                    }
                    break;

                case IDC_REFRESH:
                {
                    HWND hwndDeviceList = GetDlgItem(hwnd, IDC_DEVICELIST);
@@ -624,8 +827,26 @@ INT_PTR CALLBACK ConfigureDialogProc(HWND hwnd, UINT message, WPARAM wParam, LPA
                        filter->Release();
                    }

                    SendMessage(hwndResolutions, CB_SETCURSEL, 0, 0);
                    ConfigureDialogProc(hwnd, WM_COMMAND, MAKEWPARAM(IDC_RESOLUTION, CBN_SELCHANGE), (LPARAM)hwndResolutions);
                    //-------------------------------------------------

                    SIZE size;
                    UINT fps;
                    if(GetClosestResolution(configData->outputList, size, fps))
                    {
                        String strResolution;
                        strResolution << UIntString(size.cx) << TEXT("x") << UIntString(size.cy);

                        SendMessage(hwndResolutions, WM_SETTEXT, 0, (LPARAM)strResolution.Array());
                        ConfigureDialogProc(hwnd, WM_COMMAND, MAKEWPARAM(IDC_RESOLUTION, CBN_EDITCHANGE), (LPARAM)hwndResolutions);

                        HWND hwndFPS = GetDlgItem(hwnd, IDC_FPS);
                        SendMessage(hwndFPS, UDM_SETPOS32, 0, fps);
                    }
                    else
                    {
                        SendMessage(hwndResolutions, CB_SETCURSEL, 0, 0);
                        ConfigureDialogProc(hwnd, WM_COMMAND, MAKEWPARAM(IDC_RESOLUTION, CBN_SELCHANGE), (LPARAM)hwndResolutions);
                    }
                }
            }
            break;
@@ -636,8 +857,8 @@ INT_PTR CALLBACK ConfigureDialogProc(HWND hwnd, UINT message, WPARAM wParam, LPA
                    ConfigDialogData *configData = (ConfigDialogData*)GetWindowLongPtr(hwnd, DWLP_USER);

                    HWND hwndResolution = (HWND)lParam;
                    HWND hwndFPS = GetDlgItem(hwnd, IDC_FPS);
                    HWND hwndFPSEdit = GetDlgItem(hwnd, IDC_FPS_EDIT);
                    HWND hwndFPS = GetDlgItem(hwnd, IDC_FPS);
                    HWND hwndFPSEdit = GetDlgItem(hwnd, IDC_FPS_EDIT);

                    SIZE resolution;
                    FPSInfo fpsInfo;
@@ -737,15 +958,52 @@ INT_PTR CALLBACK ConfigureDialogProc(HWND hwnd, UINT message, WPARAM wParam, LPA
                    }

                    BOOL bFlip = SendMessage(GetDlgItem(hwnd, IDC_FLIPIMAGE), BM_GETCHECK, 0, 0) == BST_CHECKED;
                    BOOL bCustomResolution = SendMessage(GetDlgItem(hwnd, IDC_CUSTOMRESOLUTION), BM_GETCHECK, 0, 0) == BST_CHECKED;

                    configData->data->SetString(TEXT("device"), strDevice);
                    configData->data->SetInt(TEXT("customResolution"), bCustomResolution);
                    configData->data->SetInt(TEXT("resolutionWidth"), resolution.cx);
                    configData->data->SetInt(TEXT("resolutionHeight"), resolution.cy);
                    configData->data->SetInt(TEXT("fps"), fps);
                    configData->data->SetInt(TEXT("flipImage"), bFlip);

                    BOOL bUseColorKey = SendMessage(GetDlgItem(hwnd, IDC_USECOLORKEY), BM_GETCHECK, 0, 0) == BST_CHECKED;
                    DWORD color = CCGetColor(GetDlgItem(hwnd, IDC_COLOR));

                    UINT keySimilarity = (UINT)SendMessage(GetDlgItem(hwnd, IDC_BASETHRESHOLD), UDM_GETPOS32, 0, (LPARAM)&bUDMError);
                    if(bUDMError) keySimilarity = 0;

                    UINT keyBlend = (UINT)SendMessage(GetDlgItem(hwnd, IDC_BLEND), UDM_GETPOS32, 0, (LPARAM)&bUDMError);
                    if(bUDMError) keyBlend = 10;

                    int keyGamma = (int)SendMessage(GetDlgItem(hwnd, IDC_GAMMA), UDM_GETPOS32, 0, (LPARAM)&bUDMError);
                    if(bUDMError) keyGamma = 0;

                    configData->data->SetInt(TEXT("useColorKey"), bUseColorKey);
                    configData->data->SetInt(TEXT("keyColor"), color);
                    configData->data->SetInt(TEXT("keySimilarity"), keySimilarity);
                    configData->data->SetInt(TEXT("keyBlend"), keyBlend);
                    configData->data->SetInt(TEXT("keyGamma"), keyGamma);
                }

                case IDCANCEL:
                    if(LOWORD(wParam) == IDCANCEL)
                    {
                        ConfigDialogData *configData = (ConfigDialogData*)GetWindowLongPtr(hwnd, DWLP_USER);
                        ImageSource *source = API->GetSceneImageSource(configData->lpName);

                        if(source)
                        {
                            source->SetInt(TEXT("flipImage"), configData->data->GetInt(TEXT("flipImage"), 0));

                            source->SetInt(TEXT("useColorKey"), configData->data->GetInt(TEXT("useColorKey"), 0));
                            source->SetInt(TEXT("keyColor"), configData->data->GetInt(TEXT("keyColor"), 0xFFFFFFFF));
                            source->SetInt(TEXT("keySimilarity"), configData->data->GetInt(TEXT("keySimilarity"), 0));
                            source->SetInt(TEXT("keyBlend"), configData->data->GetInt(TEXT("keyBlend"), 10));
                            source->SetInt(TEXT("keyGamma"), configData->data->GetInt(TEXT("keyGamma"), 0));
                        }
                    }

                    EndDialog(hwnd, LOWORD(wParam));
            }
    }
@@ -767,6 +1025,7 @@ bool STDCALL ConfigureDShowSource(XElement *element, bool bCreating)
        data = element->CreateElement(TEXT("data"));

    ConfigDialogData *configData = new ConfigDialogData;
    configData->lpName = element->GetName();
    configData->data = data;
    configData->bGlobalSource = (scmpi(element->GetParent()->GetName(), TEXT("global sources")) == 0);
    configData->bCreating = bCreating;
@@ -801,6 +1060,8 @@ bool LoadPlugin()
{
    traceIn(DShowPluginLoadPlugin);

    InitColorControl(hinstMain);

    pluginLocale = new LocaleStringLookup;

    if(!pluginLocale->LoadStringFile(TEXT("plugins/DShowPlugin/locale/en.txt")))
@@ -35,6 +35,7 @@ extern HINSTANCE hinstMain;
IBaseFilter* GetDeviceByName(CTSTR lpName);
IPin* GetOutputPin(IBaseFilter *filter);
void GetOutputList(IPin *curPin, List<MediaOutputInfo> &outputInfoList);
bool GetClosestResolution(List<MediaOutputInfo> &outputList, SIZE &resolution, UINT &fps);

extern LocaleStringLookup *pluginLocale;
#define PluginStr(text) pluginLocale->LookupString(TEXT2(text))
@@ -52,7 +52,7 @@ END
// Dialog
//

IDD_CONFIG DIALOGEX 0, 0, 417, 92
IDD_CONFIG DIALOGEX 0, 0, 417, 235
STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | DS_CENTER | WS_POPUP | WS_CAPTION | WS_SYSMENU
CAPTION "DeviceSelection"
FONT 8, "MS Shell Dlg", 400, 0, 0x1
@@ -60,15 +60,32 @@ BEGIN
    RTEXT "DeviceSelection.Device",IDC_STATIC,4,9,117,8
    COMBOBOX IDC_DEVICELIST,125,7,133,12,CBS_DROPDOWNLIST | CBS_SORT | WS_VSCROLL | WS_TABSTOP
    PUSHBUTTON "DeviceSelection.Config",IDC_CONFIG,263,7,78,14
    RTEXT "DeviceSelection.Resolution",IDC_STATIC,4,27,117,8
    COMBOBOX IDC_RESOLUTION,125,24,133,72,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP
    RTEXT "DeviceSelection.FPS",IDC_STATIC,4,44,117,8
    EDITTEXT IDC_FPS_EDIT,125,41,42,14,ES_AUTOHSCROLL | ES_READONLY | ES_NUMBER
    CONTROL "",IDC_FPS,"msctls_updown32",UDS_SETBUDDYINT | UDS_ALIGNRIGHT | UDS_AUTOBUDDY | UDS_ARROWKEYS,168,41,10,14
    DEFPUSHBUTTON "OK",IDOK,306,71,50,14
    PUSHBUTTON "Cancel",IDCANCEL,360,71,50,14
    PUSHBUTTON "DeviceSelection.Refresh",IDC_REFRESH,344,7,66,14
    CONTROL "DeviceSelection.FlipImage",IDC_FLIPIMAGE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,3,59,131,10,WS_EX_RIGHT
    CONTROL "DeviceSelection.FlipImage",IDC_FLIPIMAGE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,2,26,131,10,WS_EX_RIGHT
    GROUPBOX "DeviceSelection.Resolution",IDC_STATIC,7,41,274,65
    CONTROL "DeviceSelection.CustomResolution",IDC_CUSTOMRESOLUTION,
                    "Button",BS_AUTOCHECKBOX | WS_TABSTOP,17,53,130,10,WS_EX_RIGHT
    RTEXT "DeviceSelection.Resolution",IDC_STATIC,18,71,117,8
    COMBOBOX IDC_RESOLUTION,138,68,133,72,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP
    RTEXT "DeviceSelection.FPS",IDC_STATIC,18,88,117,8
    EDITTEXT IDC_FPS_EDIT,138,85,42,14,ES_AUTOHSCROLL | ES_NUMBER
    CONTROL "",IDC_FPS,"msctls_updown32",UDS_SETBUDDYINT | UDS_ALIGNRIGHT | UDS_AUTOBUDDY | UDS_ARROWKEYS,180,84,10,14
    GROUPBOX "DeviceSelection.ColorKey",IDC_STATIC,7,112,274,103
    CONTROL "DeviceSelection.UseColorKey",IDC_USECOLORKEY,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,17,126,130,10,WS_EX_RIGHT
    RTEXT "DeviceSelection.Color",IDC_STATIC,18,144,117,8
    CONTROL "",IDC_COLOR,"OBSColorControl",WS_TABSTOP,138,141,28,14
    PUSHBUTTON "DeviceSelection.Select",IDC_SELECTCOLOR,170,141,50,14
    RTEXT "DeviceSelection.Similarity",IDC_STATIC,18,162,117,8
    EDITTEXT IDC_BASETHRESHOLD_EDIT,137,160,40,14,ES_AUTOHSCROLL | ES_NUMBER
    CONTROL "",IDC_BASETHRESHOLD,"msctls_updown32",UDS_SETBUDDYINT | UDS_ALIGNRIGHT | UDS_AUTOBUDDY | UDS_ARROWKEYS,178,160,10,14
    RTEXT "DeviceSelection.Blend",IDC_STATIC,17,179,117,8
    EDITTEXT IDC_BLEND_EDIT,137,177,40,14,ES_AUTOHSCROLL | ES_NUMBER
    CONTROL "",IDC_BLEND,"msctls_updown32",UDS_SETBUDDYINT | UDS_ALIGNRIGHT | UDS_AUTOBUDDY | UDS_ARROWKEYS,177,177,11,14
    RTEXT "DeviceSelection.Gamma",IDC_STATIC,17,195,117,8
    EDITTEXT IDC_GAMMA_EDIT,137,193,40,14,ES_AUTOHSCROLL
    CONTROL "",IDC_GAMMA,"msctls_updown32",UDS_SETBUDDYINT | UDS_ALIGNRIGHT | UDS_AUTOBUDDY | UDS_ARROWKEYS,177,193,11,14
    DEFPUSHBUTTON "OK",IDOK,306,214,50,14
    PUSHBUTTON "Cancel",IDCANCEL,360,214,50,14
END

@@ -85,7 +102,7 @@ BEGIN
        LEFTMARGIN, 7
        RIGHTMARGIN, 410
        TOPMARGIN, 7
        BOTTOMMARGIN, 85
        BOTTOMMARGIN, 228
    END
END
#endif // APSTUDIO_INVOKED
@@ -280,7 +280,7 @@
            />
            <Tool
                Name="VCCLCompilerTool"
                Optimization="2"
                Optimization="3"
                EnableIntrinsicFunctions="true"
                AdditionalIncludeDirectories="../OBSApi"
                PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;DSHOWPLUGIN_EXPORTS"
@@ -404,6 +404,34 @@
        <Filter
            Name="Shaders"
            >
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\ColorKey_HDYCToRGB.pShader"
                >
            </File>
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\ColorKey_RGB.pShader"
                >
            </File>
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\ColorKey_UYVToRGB.pShader"
                >
            </File>
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\ColorKey_YUVToRGB.pShader"
                >
            </File>
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\ColorKey_YUXVToRGB.pShader"
                >
            </File>
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\ColorKey_YVUToRGB.pShader"
                >
            </File>
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\ColorKey_YVXUToRGB.pShader"
                >
            </File>
            <File
                RelativePath="..\rundir\plugins\DShowPlugin\shaders\HDYCToRGB.pShader"
                >
@@ -48,6 +48,13 @@ bool DeviceSource::Init(XElement *data)

    capture->SetFiltergraph(graph);

    UINT numProcessors = OSGetProcessorCount();
    hConvertThreads = (HANDLE*)Allocate(sizeof(HANDLE)*numProcessors);
    convertData = (ConvertData*)Allocate(sizeof(ConvertData)*numProcessors);

    zero(hConvertThreads, sizeof(HANDLE)*numProcessors);
    zero(convertData, sizeof(ConvertData)*numProcessors);

    this->data = data;
    UpdateSettings();

@@ -69,12 +76,44 @@ DeviceSource::~DeviceSource()
    SafeRelease(capture);
    SafeRelease(graph);

    Free(hConvertThreads);
    Free(convertData);

    if(hSampleMutex)
        OSCloseMutex(hSampleMutex);

    traceOut;
}

String DeviceSource::ChooseShader()
{
    if(colorType == DeviceOutputType_RGB && !bUseColorKey)
        return String();

    String strShader;
    strShader << TEXT("plugins/DShowPlugin/shaders/");

    if(bUseColorKey)
        strShader << TEXT("ColorKey_");

    if(colorType == DeviceOutputType_I420)
        strShader << TEXT("YUVToRGB.pShader");
    else if(colorType == DeviceOutputType_YV12)
        strShader << TEXT("YVUToRGB.pShader");
    else if(colorType == DeviceOutputType_YVYU)
        strShader << TEXT("YVXUToRGB.pShader");
    else if(colorType == DeviceOutputType_YUY2)
        strShader << TEXT("YUXVToRGB.pShader");
    else if(colorType == DeviceOutputType_UYVY)
        strShader << TEXT("UYVToRGB.pShader");
    else if(colorType == DeviceOutputType_HDYC)
        strShader << TEXT("HDYCToRGB.pShader");
    else
        strShader << TEXT("RGB.pShader");

    return strShader;
}

bool DeviceSource::LoadFilters()
{
    traceIn(DeviceSource::LoadFilters);
@@ -90,20 +129,28 @@ bool DeviceSource::LoadFilters()
    VideoOutputType expectedVideoType;
    IPin *devicePin = NULL;
    HRESULT err;
    String strShader;

    String strDevice = data->GetString(TEXT("device"));
    UINT cx = data->GetInt(TEXT("resolutionWidth"));
    UINT cy = data->GetInt(TEXT("resolutionHeight"));
    UINT fps = data->GetInt(TEXT("fps"));
    //------------------------------------------------

    bUseCustomResolution = data->GetInt(TEXT("customResolution"));
    strDevice = data->GetString(TEXT("device"));

    bFlipVertical = data->GetInt(TEXT("flipImage")) != 0;

    renderCX = cx;
    renderCY = cy;
    //------------------------------------------------

    if(!strDevice.IsValid() || !cx || !cy || !fps)
    bUseColorKey = data->GetInt(TEXT("useColorKey")) != 0;
    keyColor = data->GetInt(TEXT("keyColor"), 0xFFFFFFFF);
    keySimilarity = data->GetInt(TEXT("keySimilarity"));
    keyBlend = data->GetInt(TEXT("keyBlend"), 10);
    keyGamma = data->GetInt(TEXT("keyGamma"));

    //------------------------------------------------

    if(!strDevice.IsValid())
    {
        AppWarning(TEXT("DShowPlugin: Invalid device/size/fps specified"));
        AppWarning(TEXT("DShowPlugin: Invalid device specified"));
        goto cleanFinish;
    }

@@ -123,7 +170,51 @@ bool DeviceSource::LoadFilters()

    GetOutputList(devicePin, outputList);

    MediaOutputInfo *bestOutput = GetBestMediaOutput(outputList, cx, cy, fps);
    //------------------------------------------------

    if(bUseCustomResolution)
    {
        renderCX = data->GetInt(TEXT("resolutionWidth"));
        renderCY = data->GetInt(TEXT("resolutionHeight"));
        fps = data->GetInt(TEXT("fps"));
    }
    else
    {
        SIZE size;
        GetClosestResolution(outputList, size, fps);
        renderCX = size.cx;
        renderCY = size.cy;
    }

    if(!renderCX || !renderCY || !fps)
    {
        AppWarning(TEXT("DShowPlugin: Invalid size/fps specified"));
        goto cleanFinish;
    }

    UINT numProcessors = OSGetProcessorCount();
    for(UINT i=0; i<numProcessors; i++)
    {
        convertData[i].width  = renderCX;
        convertData[i].height = renderCY;

        if(i == 0)
            convertData[i].startY = 0;
        else
            convertData[i].startY = convertData[i-1].endY;

        if(i == (numProcessors-1))
            convertData[i].endY = renderCY;
        else
            convertData[i].endY = ((renderCY/numProcessors)*(i+1)) & 0xFFFFFFFE;
    }
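The loop above carves the frame into one horizontal band per logical processor; every interior band boundary is forced onto an even row (& 0xFFFFFFFE) so each band owns whole 2x2 chroma blocks of the 4:2:0 image. A standalone sketch of the same partitioning, illustrative only:

    // Illustrative sketch of the row partitioning used above.
    void SplitRows(ConvertData *slices, UINT numThreads, UINT height)
    {
        for(UINT i=0; i<numThreads; i++)
        {
            slices[i].startY = (i == 0) ? 0 : slices[i-1].endY;
            slices[i].endY   = (i == numThreads-1) ? height
                             : ((height/numThreads)*(i+1)) & 0xFFFFFFFE; // keep boundaries on even rows
        }
    }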

    bFirstFrame = true;
    prevSample = NULL;

    //------------------------------------------------

    MediaOutputInfo *bestOutput = GetBestMediaOutput(outputList, renderCX, renderCY, fps);
    if(!bestOutput)
    {
        AppWarning(TEXT("DShowPlugin: Could not find appropriate resolution to create device image source"));
@@ -132,41 +223,29 @@

    expectedVideoType = bestOutput->videoType;

    colorType = DeviceOutputType_RGB;

    if(bestOutput->videoType == VideoOutputType_I420)
    {
        colorConvertShader = CreatePixelShaderFromFile(TEXT("plugins/DShowPlugin/shaders/YUVToRGB.pShader"));
        colorType = DeviceOutputType_I420;
    }
    else if(bestOutput->videoType == VideoOutputType_YV12)
    {
        colorConvertShader = CreatePixelShaderFromFile(TEXT("plugins/DShowPlugin/shaders/YVUToRGB.pShader"));
        colorType = DeviceOutputType_YV12;
    }
    else if(bestOutput->videoType == VideoOutputType_YVYU)
    {
        colorConvertShader = CreatePixelShaderFromFile(TEXT("plugins/DShowPlugin/shaders/YVXUToRGB.pShader"));
        colorType = DeviceOutputType_YVYU;
    }
    else if(bestOutput->videoType == VideoOutputType_YUY2)
    {
        colorConvertShader = CreatePixelShaderFromFile(TEXT("plugins/DShowPlugin/shaders/YUXVToRGB.pShader"));
        colorType = DeviceOutputType_YUY2;
    }
    else if(bestOutput->videoType == VideoOutputType_UYVY)
    {
        colorConvertShader = CreatePixelShaderFromFile(TEXT("plugins/DShowPlugin/shaders/UYVToRGB.pShader"));
        colorType = DeviceOutputType_UYVY;
    }
    else if(bestOutput->videoType == VideoOutputType_HDYC)
    {
        colorConvertShader = CreatePixelShaderFromFile(TEXT("plugins/DShowPlugin/shaders/HDYCToRGB.pShader"));
        colorType = DeviceOutputType_UYVY;
    }
        colorType = DeviceOutputType_HDYC;
    else
    {
        colorType = DeviceOutputType_RGB;
        expectedVideoType = VideoOutputType_RGB32;
    }

    if(colorType != DeviceOutputType_RGB)
        lpImageBuffer = (LPBYTE)Allocate(renderCX*renderCY*4);

    strShader = ChooseShader();
    if(strShader.IsValid())
        colorConvertShader = CreatePixelShaderFromFile(strShader);

    if(colorType != DeviceOutputType_RGB && !colorConvertShader)
    {
@@ -260,6 +339,12 @@ cleanFinish:
            colorConvertShader = NULL;
        }

        if(lpImageBuffer)
        {
            Free(lpImageBuffer);
            lpImageBuffer = NULL;
        }

        bReadyToDraw = true;
    }
    else
@@ -267,17 +352,17 @@ cleanFinish:

    //-----------------------------------------------------
    // create the texture regardless, will just show up as red to indicate failure
    BYTE *textureData = (BYTE*)Allocate(cx*cy*4);
    BYTE *textureData = (BYTE*)Allocate(renderCX*renderCY*4);

    if(colorType == DeviceOutputType_RGB) //you may be confused, but when directshow outputs RGB, it's actually outputting BGR
    {
        msetd(textureData, 0xFFFF0000, cx*cy*4);
        texture = CreateTexture(cx, cy, GS_BGR, textureData, FALSE, FALSE);
        msetd(textureData, 0xFFFF0000, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_BGR, textureData, FALSE, FALSE);
    }
    else //if we're working with planar YUV, we can just use regular RGB textures instead
    {
        msetd(textureData, 0xFF0000FF, cx*cy*4);
        texture = CreateTexture(cx, cy, GS_RGB, textureData, FALSE, FALSE);
        msetd(textureData, 0xFF0000FF, renderCX*renderCY*4);
        texture = CreateTexture(renderCX, renderCY, GS_RGB, textureData, FALSE, FALSE);
    }

    Free(textureData);
@@ -315,6 +400,25 @@ void DeviceSource::UnloadFilters()
        colorConvertShader = NULL;
    }

    if(lpImageBuffer)
    {
        Free(lpImageBuffer);
        lpImageBuffer = NULL;
    }

    UINT numProcessors = OSGetProcessorCount();
    for(UINT i=0; i<numProcessors; i++)
    {
        if(hConvertThreads[i])
        {
            WaitForSingleObject(hConvertThreads[i], INFINITE);
            CloseHandle(hConvertThreads[i]);
            hConvertThreads[i] = NULL;
        }
    }

    SafeRelease(prevSample);

    SafeRelease(control);

    traceOut;
@@ -385,6 +489,13 @@ void DeviceSource::Receive(IMediaSample *sample)
    }
}

DWORD STDCALL PackPlanarThread(ConvertData *data)
{
    SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);
    PackPlanar(data->output, data->input, data->width, data->height, data->pitch, data->startY, data->endY);
    return 0;
}

void DeviceSource::Preprocess()
{
    traceIn(DeviceSource::Preprocess);
@@ -402,6 +513,8 @@ void DeviceSource::Preprocess()
    }
    OSLeaveMutex(hSampleMutex);

    UINT numProcessors = OSGetProcessorCount();

    if(lastSample)
    {
        BYTE *lpImage = NULL;
@@ -412,20 +525,37 @@
            if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
                texture->SetImage(lpImage, GS_IMAGEFORMAT_BGRX, renderCX*4);
            }

            lastSample->Release();
        }
        else if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            if(prevSample)
            {
                WaitForMultipleObjects(numProcessors, hConvertThreads, TRUE, INFINITE);
                for(UINT i=0; i<numProcessors; i++)
                {
                    CloseHandle(hConvertThreads[i]);
                    hConvertThreads[i] = NULL;
                }
                prevSample->Release();
                prevSample = NULL;

                texture->SetImage(lpImageBuffer, GS_IMAGEFORMAT_RGBX, renderCX*4);
            }

            if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
            {
                LPBYTE lpData;
                UINT pitch;

                if(texture->Map(lpData, pitch))
                for(UINT i=0; i<numProcessors; i++)
                {
                    PackPlanar(lpData, lpImage);
                    texture->Unmap();
                    convertData[i].input  = lpImage;
                    convertData[i].pitch  = renderCX*4;
                    convertData[i].output = lpImageBuffer;
                    hConvertThreads[i] = OSCreateThread((XTHREAD)PackPlanarThread, (LPVOID)(convertData+i));
                }
            }

            prevSample = lastSample;
        }
        else if(colorType == DeviceOutputType_YVYU || colorType == DeviceOutputType_YUY2)
        {
@@ -434,17 +564,16 @@
                LPBYTE lpData;
                UINT pitch;

                long chi1 = lastSample->GetSize();
                long chi2 = lastSample->GetActualDataLength();

                if(texture->Map(lpData, pitch))
                {
                    Convert422To444(lpData, lpImage, true);
                    texture->Unmap();
                }
            }

            lastSample->Release();
        }
        else if(colorType == DeviceOutputType_UYVY)
        else if(colorType == DeviceOutputType_UYVY || colorType == DeviceOutputType_HDYC)
        {
            if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
            {
@@ -457,9 +586,9 @@
                    texture->Unmap();
                }
            }
        }

        lastSample->Release();
            lastSample->Release();
        }

        bReadyToDraw = true;
    }
@@ -475,8 +604,22 @@ void DeviceSource::Render(const Vect2 &pos, const Vect2 &size)
    {
        Shader *oldShader = GetCurrentPixelShader();
        if(colorConvertShader)
        {
            LoadPixelShader(colorConvertShader);

            if(bUseColorKey)
            {
                float fSimilarity = float(keySimilarity)/100.0f;
                float fBlendVal = float(max(keyBlend, 1)/100.0f);
                float fGammaVal = 1.0f+(float(keyGamma)/100.0f);

                colorConvertShader->SetColor(colorConvertShader->GetParameterByName(TEXT("colorKey")), keyColor);
                colorConvertShader->SetFloat(colorConvertShader->GetParameterByName(TEXT("similarity")), fSimilarity);
                colorConvertShader->SetFloat(colorConvertShader->GetParameterByName(TEXT("blend")), fBlendVal);
                colorConvertShader->SetFloat(colorConvertShader->GetParameterByName(TEXT("gamma")), fGammaVal);
            }
        }

        bool bFlip = bFlipVertical;

        if(colorType != DeviceOutputType_RGB)
@@ -498,16 +641,72 @@ void DeviceSource::UpdateSettings()
{
    traceIn(DeviceSource::UpdateSettings);

    bool bWasCapturing = bCapturing;
    String strNewDevice = data->GetString(TEXT("device"));
    UINT newFPS = data->GetInt(TEXT("fps"));
    UINT newCX = data->GetInt(TEXT("resolutionWidth"));
    UINT newCY = data->GetInt(TEXT("resolutionHeight"));
    BOOL bNewCustom = data->GetInt(TEXT("customResolution"));

    if(bWasCapturing) Stop();
    if(renderCX != newCX || renderCY != newCY || fps != newFPS || !strDevice.CompareI(strNewDevice) || bNewCustom != bUseCustomResolution)
    {
        bool bWasCapturing = bCapturing;
        if(bWasCapturing) Stop();

    UnloadFilters();
    LoadFilters();
        UnloadFilters();
        LoadFilters();

    if(bWasCapturing) Start();
        if(bWasCapturing) Start();
    }

    traceOut;
}

void DeviceSource::SetInt(CTSTR lpName, int iVal)
{
    if(bCapturing)
    {
        if(scmpi(lpName, TEXT("useColorKey")) == 0)
        {
            bool bNewVal = iVal != 0;
            if(bUseColorKey != bNewVal)
            {
                API->EnterSceneMutex();
                bUseColorKey = bNewVal;

                if(colorConvertShader)
                {
                    delete colorConvertShader;
                    colorConvertShader = NULL;
                }

                String strShader;
                strShader = ChooseShader();

                if(strShader.IsValid())
                    colorConvertShader = CreatePixelShaderFromFile(strShader);

                API->LeaveSceneMutex();
            }
        }
        else if(scmpi(lpName, TEXT("flipImage")) == 0)
        {
            bFlipVertical = iVal != 0;
        }
        else if(scmpi(lpName, TEXT("keyColor")) == 0)
        {
            keyColor = (DWORD)iVal;
        }
        else if(scmpi(lpName, TEXT("keySimilarity")) == 0)
        {
            keySimilarity = iVal;
        }
        else if(scmpi(lpName, TEXT("keyBlend")) == 0)
        {
            keyBlend = iVal;
        }
        else if(scmpi(lpName, TEXT("keyGamma")) == 0)
        {
            keyGamma = iVal;
        }
    }
}
@@ -19,6 +19,7 @@

#pragma once

void PackPlanar(LPBYTE convertBuffer, LPBYTE lpPlanar, UINT renderCX, UINT renderCY, UINT pitch, UINT startY, UINT endY);

enum DeviceColorType
{
@@ -32,6 +33,15 @@ enum DeviceColorType
    DeviceOutputType_YVYU,
    DeviceOutputType_YUY2,
    DeviceOutputType_UYVY,
    DeviceOutputType_HDYC,
};

struct ConvertData
{
    LPBYTE input, output;
    UINT width, height;
    UINT pitch;
    UINT startY, endY;
};

class DeviceSource : public ImageSource
@@ -49,9 +59,14 @@ class DeviceSource : public ImageSource

    DeviceColorType colorType;

    String strDevice;
    bool bFlipVertical;
    UINT fps;
    UINT renderCX, renderCY;
    BOOL bUseCustomResolution;

    bool bFirstFrame;
    bool bUseThreadedConversion;
    bool bReadyToDraw;

    Texture *texture;
@@ -59,11 +74,27 @@ class DeviceSource : public ImageSource
    XElement *data;
    bool bCapturing, bFiltersLoaded;
    IMediaSample *curSample;
    IMediaSample *prevSample;
    Shader *colorConvertShader;

    //---------------------------------

    void PackPlanar(LPBYTE convertBuffer, LPBYTE lpPlanar);
    LPBYTE lpImageBuffer;
    ConvertData *convertData;
    HANDLE *hConvertThreads;

    //---------------------------------

    bool bUseColorKey;
    DWORD keyColor;
    int keySimilarity;
    int keyBlend;
    int keyGamma;

    //---------------------------------

    String ChooseShader();

    void Convert422To444(LPBYTE convertBuffer, LPBYTE lp422, bool bLeadingY);

    void FlushSamples()
@@ -93,6 +124,8 @@ public:
    void BeginScene();
    void EndScene();

    virtual void SetInt(CTSTR lpName, int iVal);

    Vect2 GetSize() const {return Vect2(float(renderCX), float(renderCY));}
};
@@ -19,253 +19,39 @@

#include "DShowPlugin.h"

#ifdef _WIN64

void DeviceSource::PackPlanar(LPBYTE convertBuffer, LPBYTE lpPlanar)
{
    UINT halfWidth  = renderCX/2;
    UINT halfHeight = renderCY/2;

    LPBYTE output = convertBuffer;
    LPBYTE chromaPlanes[2];
    chromaPlanes[0] = lpPlanar+(renderCX*renderCY);
    chromaPlanes[1] = chromaPlanes[0]+(halfWidth*halfHeight);

    //----------------------------------------------------------
    // lum val
    DWORD size = renderCX*renderCY;
    DWORD dwQWSize = size>>3;
    QWORD *inputQW = (QWORD*)lpPlanar;
    QWORD *inputQWEnd = inputQW+dwQWSize;

    while(inputQW < inputQWEnd)
    {
        register QWORD qw = *inputQW;

        *output     = BYTE(qw);
        output[4]   = BYTE(qw>>=8);
        output[8]   = BYTE(qw>>=8);
        output[12]  = BYTE(qw>>=8);
        output[16]  = BYTE(qw>>=8);
        output[20]  = BYTE(qw>>=8);
        output[24]  = BYTE(qw>>=8);
        output[28]  = BYTE(qw>>=8);

        output += 32;
        inputQW++;
    }

    LPBYTE input = (LPBYTE)inputQW;
    size &= 7;
    while(size--)
    {
        *output = *input;

        output += 4;
        input++;
    }

    //----------------------------------------------------------
    // chroma 1

    for(UINT i=0; i<2; i++)
    {
        output = convertBuffer+i+1;
        dwQWSize = halfWidth>>3;

        for(UINT y=0; y<renderCY; y++)
        {
            size = halfWidth;
            inputQW = (QWORD*)(chromaPlanes[i]+(halfWidth*(y>>1)));
            inputQWEnd = inputQW+dwQWSize;

            while(inputQW < inputQWEnd)
            {
                register QWORD qw = *inputQW;

                *output     = BYTE(qw);
                output[4]   = BYTE(qw);
                output[8]   = BYTE(qw>>=8);
                output[12]  = BYTE(qw);
                output[16]  = BYTE(qw>>=8);
                output[20]  = BYTE(qw);
                output[24]  = BYTE(qw>>=8);
                output[28]  = BYTE(qw);
                output[32]  = BYTE(qw>>=8);
                output[36]  = BYTE(qw);
                output[40]  = BYTE(qw>>=8);
                output[44]  = BYTE(qw);
                output[48]  = BYTE(qw>>=8);
                output[52]  = BYTE(qw);
                output[56]  = BYTE(qw>>=8);
                output[60]  = BYTE(qw);

                output += 64;
                inputQW++;
            }

            input = (LPBYTE)inputQW;
            size &= 7;
            while(size--)
            {
                register BYTE byte = *input;
                *output   = byte;
                output[4] = byte;

                output += 8;
                input++;
            }
        }
    }
}

#else

void DeviceSource::PackPlanar(LPBYTE convertBuffer, LPBYTE lpPlanar)
{
    UINT halfWidth  = renderCX/2;
    UINT halfHeight = renderCY/2;

    LPBYTE output = convertBuffer;
    LPBYTE chromaPlanes[2];
    chromaPlanes[0] = lpPlanar+(renderCX*renderCY);
    chromaPlanes[1] = chromaPlanes[0]+(halfWidth*halfHeight);

    //----------------------------------------------------------
    // lum val
    DWORD size = renderCX*renderCY;
    DWORD dwDWSize = size>>2;
    LPDWORD inputDW = (LPDWORD)lpPlanar;
    LPDWORD inputDWEnd = inputDW+dwDWSize;

    while(inputDW < inputDWEnd)
    {
        register DWORD dw = *inputDW;

        *output     = BYTE(dw);
        output[4]   = BYTE(dw>>=8);
        output[8]   = BYTE(dw>>=8);
        output[12]  = BYTE(dw>>=8);

        output += 16;
        inputDW++;
    }

    LPBYTE input = (LPBYTE)inputDW;
    size &= 3;
    while(size--)
    {
        *output = *input;

        output += 4;
        input++;
    }

    //----------------------------------------------------------
    // chroma 1

    for(UINT i=0; i<2; i++)
    {
        output = convertBuffer+i+1;
        dwDWSize = halfWidth>>2;

        for(UINT y=0; y<renderCY; y++)
        {
            size = halfWidth;
            inputDW = (LPDWORD)(chromaPlanes[i]+(halfWidth*(y>>1)));
            inputDWEnd = inputDW+dwDWSize;

            while(inputDW < inputDWEnd)
            {
                register DWORD dw = *inputDW;

                *output     = BYTE(dw);
                output[4]   = BYTE(dw);
                output[8]   = BYTE(dw>>=8);
                output[12]  = BYTE(dw);
                output[16]  = BYTE(dw>>=8);
                output[20]  = BYTE(dw);
                output[24]  = BYTE(dw>>=8);
                output[28]  = BYTE(dw);

                output += 32;
                inputDW++;
            }

            input = (LPBYTE)inputDW;
            size &= 3;
            while(size--)
            {
                register BYTE byte = *input;
                *output   = byte;
                output[4] = byte;

                output += 8;
                input++;
            }
        }
    }
}

#endif

/*void DeviceSource::PackPlanar(LPBYTE lpPlanar)
//now properly takes CPU cache into account - it's just so much faster than it was.
void PackPlanar(LPBYTE convertBuffer, LPBYTE lpPlanar, UINT renderCX, UINT renderCY, UINT pitch, UINT startY, UINT endY)
{
    LPBYTE output = convertBuffer;
    LPBYTE input  = lpPlanar;
    LPBYTE input2 = input+(renderCX*renderCY);
    LPBYTE input3 = input2+(renderCX*renderCY/4);

    //lum val
    for(UINT y=0; y<renderCY; y++)
    UINT halfStartY = startY/2;
    UINT halfX = renderCX/2;
    UINT halfY = endY/2;

    for(UINT y=halfStartY; y<halfY; y++)
    {
        for(UINT x=0; x<renderCX; x++)
        {
            *output = *input;
        LPBYTE lpLum1 = input + y*2*renderCX;
        LPBYTE lpLum2 = lpLum1 + renderCX;
        LPBYTE lpChroma1 = input2 + y*halfX;
        LPBYTE lpChroma2 = input3 + y*halfX;
        LPDWORD output1 = (LPDWORD)(output + (y*2)*pitch);
        LPDWORD output2 = (LPDWORD)(((LPBYTE)output1)+pitch);

            output += 4;
            input++;
        for(UINT x=0; x<halfX; x++)
        {
            DWORD out = (*(lpChroma1++) << 8) | (*(lpChroma2++) << 16);

            *(output1++) = *(lpLum1++) | out;
            *(output1++) = *(lpLum1++) | out;

            *(output2++) = *(lpLum2++) | out;
            *(output2++) = *(lpLum2++) | out;
        }
    }

    //chroma 1
    output = convertBuffer+1;
    LPBYTE plane1 = lpPlanar+(renderCX*renderCY);
    UINT halfWidth = renderCX/2;
    UINT halfHeight = renderCY/2;

    for(UINT y=0; y<renderCY; y++)
    {
        input = plane1+(halfWidth*(y/2));

        for(UINT x=0; x<halfWidth; x++)
        {
            *output = *input;
            output += 4;
            *output = *input;
            output += 4;

            input++;
        }
    }

    //chroma 1
    output = convertBuffer+2;
    LPBYTE plane2 = plane1+(halfWidth*halfHeight);

    for(UINT y=0; y<renderCY; y++)
    {
        input = plane2+(halfWidth*(y/2));

        for(UINT x=0; x<halfWidth; x++)
        {
            *output = *input;
            output += 4;
            *output = *input;
            output += 4;

            input++;
        }
    }*/
}
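The rewritten PackPlanar above walks two luma rows per pass and reuses each chroma byte across a 2x2 block, so both the planar I420 input and the packed output are touched sequentially (the "CPU cache" comment above). A hedged sketch of how the per-slice calls combine, based on the Preprocess()/PackPlanarThread code earlier in this commit; note the commit actually defers the wait to the next Preprocess() call via prevSample, pipelining conversion against rendering:

    // Illustrative: one thread per ConvertData slice, then wait and upload.
    for(UINT i=0; i<numProcessors; i++)
        hConvertThreads[i] = OSCreateThread((XTHREAD)PackPlanarThread, (LPVOID)(convertData+i));

    WaitForMultipleObjects(numProcessors, hConvertThreads, TRUE, INFINITE);
    texture->SetImage(lpImageBuffer, GS_IMAGEFORMAT_RGBX, renderCX*4);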

void DeviceSource::Convert422To444(LPBYTE convertBuffer, LPBYTE lp422, bool bLeadingY)
{

@@ -12,6 +12,20 @@
#define IDC_FLIPIMAGE                   1014
#define IDC_BUTTON1                     1015
#define IDC_CUSTOM                      1015
#define IDC_SELECTCOLOR                 1015
#define IDC_AUTOMATICRESOLUTION         1016
#define IDC_CUSTOMRESOLUTION            1016
#define IDC_USECHROMAKEY                1017
#define IDC_USECOLORKEY                 1017
#define IDC_COLOR                       1018
#define IDC_BASETHRESHOLD               1019
#define IDC_BASETHRESHOLD_EDIT          1020
#define IDC_BASETHRESHOLD_CHI           1021
#define IDC_BLEND_EDIT                  1021
#define IDC_BLEND                       1022
#define IDC_GAMMA_EDIT                  1023
#define IDC_BASETHRESHOLD3              1024
#define IDC_GAMMA                       1024

// Next default values for new objects
//
@@ -19,7 +33,7 @@
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE        102
#define _APS_NEXT_COMMAND_VALUE         40001
#define _APS_NEXT_CONTROL_VALUE         1016
#define _APS_NEXT_CONTROL_VALUE         1022
#define _APS_NEXT_SYMED_VALUE           101
#endif
#endif

OBS.rc
@@ -522,8 +522,8 @@ END
//

VS_VERSION_INFO VERSIONINFO
 FILEVERSION 0,4,2,1
 PRODUCTVERSION 0,4,2,1
 FILEVERSION 0,4,3,0
 PRODUCTVERSION 0,4,3,0
 FILEFLAGSMASK 0x17L
#ifdef _DEBUG
 FILEFLAGS 0x1L
@@ -539,12 +539,12 @@ BEGIN
        BLOCK "041104b0"
        BEGIN
            VALUE "FileDescription", "Open Broadcaster Software"
            VALUE "FileVersion", "0, 4, 2, 1"
            VALUE "FileVersion", "0, 4, 3, 0"
            VALUE "InternalName", "OBS"
            VALUE "LegalCopyright", "Copyright (C) 2012"
            VALUE "OriginalFilename", "OBS.exe"
            VALUE "ProductName", "Open Broadcaster Software"
            VALUE "ProductVersion", "0, 4, 2, 1"
            VALUE "ProductVersion", "0, 4, 3, 0"
        END
    END
    BLOCK "VarFileInfo"

@@ -75,6 +75,7 @@ public:
    virtual void GetBaseSize(UINT &width, UINT &height) const=0;
    virtual void GetRenderFrameSize(UINT &width, UINT &height) const=0;
    virtual void GetOutputSize(UINT &width, UINT &height) const=0;
    virtual UINT GetMaxFPS() const=0;

    virtual CTSTR GetLanguage() const=0;

@@ -89,6 +90,22 @@ public:
    virtual void RemoveStreamInfo(UINT infoID)=0;

    inline bool SSE2Available() {return bSSE2Availabe;}

    inline ImageSource* GetSceneImageSource(CTSTR lpImageSource)
    {
        Scene *scene = GetScene();
        if(scene)
        {
            SceneItem *item = scene->GetSceneItem(lpImageSource);
            if(item)
            {
                if(item->GetSource())
                    return item->GetSource();
            }
        }

        return NULL;
    }
};
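GetSceneImageSource is the new API hook that lets a plugin's config dialog reach the live source in the current scene and push settings to it without tearing the filter graph down. A minimal sketch of the pattern the DirectShow dialog uses elsewhere in this commit (illustrative; names taken from the dialog code above):

    // Illustrative: live-update a property while the preview is running.
    ImageSource *source = API->GetSceneImageSource(configData->lpName);
    if(source)
        source->SetInt(TEXT("keyBlend"), newBlendValue);   // handled by DeviceSource::SetInt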

@@ -192,7 +192,7 @@
            />
            <Tool
                Name="VCCLCompilerTool"
                Optimization="2"
                Optimization="3"
                EnableIntrinsicFunctions="true"
                PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;OBSAPI_EXPORTS;BASE_EXPORTING"
                ExceptionHandling="2"
@@ -270,7 +270,7 @@
            />
            <Tool
                Name="VCCLCompilerTool"
                Optimization="2"
                Optimization="3"
                EnableIntrinsicFunctions="true"
                PreprocessorDefinitions="WIN32;NDEBUG;_WINDOWS;_USRDLL;OBSAPI_EXPORTS;BASE_EXPORTING"
                ExceptionHandling="2"

@@ -36,6 +36,34 @@ public:

    virtual void BeginScene() {}
    virtual void EndScene() {}

    virtual void SetFloat(CTSTR lpName, float fValue) {}
    virtual void SetInt(CTSTR lpName, int iValue) {}
    virtual void SetString(CTSTR lpName, CTSTR lpVal) {}
    virtual void SetVector(CTSTR lpName, const Vect &value) {}
    virtual void SetVector2(CTSTR lpName, const Vect2 &value) {}
    virtual void SetVector4(CTSTR lpName, const Vect4 &value) {}
    virtual void SetMatrix(CTSTR lpName, const Matrix &mat) {}

    inline void SetColor(CTSTR lpName, const Color4 &value) {SetVector4(lpName, value);}
    inline void SetColor(CTSTR lpName, float fR, float fB, float fG, float fA=1.0f){SetVector4(lpName, Color4(fR, fB, fG, fA));}
    inline void SetColor(CTSTR lpName, DWORD color) {SetVector4(lpName, RGBA_to_Vect4(color));}

    inline void SetColor3(CTSTR lpName, const Color3 &value) {SetVector(lpName, value);}
    inline void SetColor3(CTSTR lpName, float fR, float fB, float fG) {SetVector(lpName, Color3(fR, fB, fG));}
    inline void SetColor3(CTSTR lpName, DWORD color) {SetVector(lpName, RGB_to_Vect(color));}
    inline void SetVector4(CTSTR lpName, float fX, float fY, float fZ, float fW) {SetVector4(lpName, Vect4(fX, fY, fZ, fW));}
    inline void SetVector(CTSTR lpName, float fX, float fY, float fZ) {SetVector(lpName, Vect(fX, fY, fZ));}

    //-------------------------------------------------------------

    virtual bool GetFloat(CTSTR lpName, float &fValue) const {return false;}
    virtual bool GetInt(CTSTR lpName, int &iValue) const {return false;}
    virtual bool GetString(CTSTR lpName, String &strVal) const {return false;}
    virtual bool GetVector(CTSTR lpName, Vect &value) const {return false;}
    virtual bool GetVector2(CTSTR lpName, Vect2 &value) const {return false;}
    virtual bool GetVector4(CTSTR lpName, Vect4 &value) const {return false;}
    virtual bool GetMatrix(CTSTR lpName, Matrix &mat) const {return false;}
};

@@ -120,6 +120,7 @@ BASE_EXPORT void STDCALL OSFreeLibrary(HANDLE hLibrary);

BASE_EXPORT void STDCALL OSSleep(DWORD dwMSeconds);

BASE_EXPORT UINT   STDCALL OSGetProcessorCount();
BASE_EXPORT HANDLE STDCALL OSCreateThread(XTHREAD lpThreadFunc, LPVOID param);
BASE_EXPORT BOOL   STDCALL OSWaitForThread(HANDLE hThread, LPDWORD ret);
BASE_EXPORT BOOL   STDCALL OSCloseThread(HANDLE hThread);

@@ -40,6 +40,7 @@ DWORD startTick;
void STDCALL InputProc(HWND hwnd, UINT message, WPARAM wParam, LPARAM lParam);
void STDCALL ResetCursorClip();

SYSTEM_INFO si;

BOOL bHidingCursor = 0;

@@ -50,6 +51,8 @@ void STDCALL OSInit()
{
    timeBeginPeriod(1);

    GetSystemInfo(&si);

    QueryPerformanceFrequency(&clockFreq);
    QueryPerformanceCounter(&startTime);
    startTick = GetTickCount();
@@ -160,9 +163,7 @@ void STDCALL OSFindClose(HANDLE hFind)

DWORD STDCALL OSGetSysPageSize()
{
    SYSTEM_INFO SI;
    GetSystemInfo(&SI);
    return SI.dwPageSize;
    return si.dwPageSize;
}

LPVOID STDCALL OSVirtualAlloc(size_t dwSize)
@@ -445,6 +446,11 @@ QWORD STDCALL OSGetTimeMicroseconds()
}


UINT STDCALL OSGetProcessorCount()
{
    return si.dwNumberOfProcessors;
}

HANDLE STDCALL OSCreateThread(XTHREAD lpThreadFunc, LPVOID param)
{
    DWORD dummy;

@@ -276,13 +276,17 @@ HANDLE D3D10Shader::GetParameterByName(CTSTR lpName) const
    return NULL;
}

#define GetValidHandle() \
/*#define GetValidHandle() \
    ShaderParam *param = (ShaderParam*)hObject; \
    if(!hObject) \
    { \
        AppWarning(TEXT("Invalid handle input as shader parameter")); \
        return; \
    }
    }*/
#define GetValidHandle() \
    ShaderParam *param = (ShaderParam*)hObject; \
    if(!hObject) \
        return;


void D3D10Shader::GetParameterInfo(HANDLE hObject, ShaderParameterInfo &paramInfo) const

@@ -72,7 +72,7 @@ D3D10System::D3D10System()
    swapDesc.SampleDesc.Count = 1;
    swapDesc.Windowed = TRUE;

    bDisableCompatibilityMode = AppConfig->GetInt(TEXT("Video"), TEXT("DisableD3DCompatibilityMode"), 1) != 0;
    bDisableCompatibilityMode = 1;//AppConfig->GetInt(TEXT("Video"), TEXT("DisableD3DCompatibilityMode"), 1) != 0;

    UINT createFlags = D3D10_CREATE_DEVICE_BGRA_SUPPORT;
    if(GlobalConfig->GetInt(TEXT("General"), TEXT("UseDebugD3D")))

@@ -209,13 +209,13 @@ public:
            return false;
        }

        if(!bFirstFrameProcessed && nalNum)
        /*if(!bFirstFrameProcessed && nalNum)
        {
            delayTime = -picOut.i_dts;
            bFirstFrameProcessed = true;
        }
        }*/

        int timeOffset = int(INT64(picOut.i_pts+delayTime)-INT64(outputTimestamp));
        int timeOffset = int(INT64(picOut.i_pts/*+delayTime*/)-INT64(outputTimestamp));
        //Log(TEXT("dts: %d, pts: %d, timestamp: %d, offset: %d"), picOut.i_dts, picOut.i_pts, outputTimestamp, timeOffset);

        timeOffset = htonl(timeOffset);

@@ -46,6 +46,26 @@ public:
        String strName = data->GetString(TEXT("name"));
        globalSource = App->GetGlobalSource(strName);
    }

    //-------------------------------------------------------------

    virtual void SetFloat(CTSTR lpName, float fValue) {globalSource->SetFloat(lpName, fValue);}
    virtual void SetInt(CTSTR lpName, int iValue) {globalSource->SetInt(lpName, iValue);}
    virtual void SetString(CTSTR lpName, CTSTR lpVal) {globalSource->SetString(lpName, lpVal);}
    virtual void SetVector(CTSTR lpName, const Vect &value) {globalSource->SetVector(lpName, value);}
    virtual void SetVector2(CTSTR lpName, const Vect2 &value) {globalSource->SetVector2(lpName, value);}
    virtual void SetVector4(CTSTR lpName, const Vect4 &value) {globalSource->SetVector4(lpName, value);}
    virtual void SetMatrix(CTSTR lpName, const Matrix &mat) {globalSource->SetMatrix(lpName, mat);}

    //-------------------------------------------------------------

    virtual bool GetFloat(CTSTR lpName, float &fValue) const {return globalSource->GetFloat(lpName, fValue);}
    virtual bool GetInt(CTSTR lpName, int &iValue) const {return globalSource->GetInt(lpName, iValue);}
    virtual bool GetString(CTSTR lpName, String &strVal) const {return globalSource->GetString(lpName, strVal);}
    virtual bool GetVector(CTSTR lpName, Vect &value) const {return globalSource->GetVector(lpName, value);}
    virtual bool GetVector2(CTSTR lpName, Vect2 &value) const {return globalSource->GetVector2(lpName, value);}
    virtual bool GetVector4(CTSTR lpName, Vect4 &value) const {return globalSource->GetVector4(lpName, value);}
    virtual bool GetMatrix(CTSTR lpName, Matrix &mat) const {return globalSource->GetMatrix(lpName, mat);}
};

@@ -19,7 +19,7 @@
#include "Main.h"


void Convert444to420(LPBYTE input, int width, int pitch, int height, LPBYTE *output, bool bSSE2Available)
void Convert444to420(LPBYTE input, int width, int pitch, int height, int startY, int endY, LPBYTE *output, bool bSSE2Available)
{
    traceIn(Convert444to420);

@@ -33,7 +33,7 @@ void Convert444to420(LPBYTE input, int width, int pitch, int height, LPBYTE *out
        __m128i lumMask = _mm_set1_epi32(0x0000FF00);
        __m128i uvMask = _mm_set1_epi16(0x00FF);

        for(int y=0; y<height; y+=2)
        for(int y=startY; y<endY; y+=2)
        {
            int yPos = y*pitch;
            int chrYPos = ((y>>1)*chrPitch);
@@ -77,7 +77,7 @@ void Convert444to420(LPBYTE input, int width, int pitch, int height, LPBYTE *out
    else
    {
#ifdef _WIN64
        for(int y=0; y<height; y+=2)
        for(int y=startY; y<endY; y+=2)
        {
            int yPos = y*pitch;
            int chrYPos = ((y>>1)*chrPitch);
@@ -106,7 +106,7 @@ void Convert444to420(LPBYTE input, int width, int pitch, int height, LPBYTE *out
            }
        }
#else
        for(int y=0; y<height; y+=2)
        for(int y=startY; y<endY; y+=2)
        {
            int yPos = y*pitch;
            int chrYPos = ((y>>1)*chrPitch);
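Convert444to420 now takes a startY/endY row window instead of always converting the whole frame, so a caller can split the 4:4:4-to-4:2:0 conversion across threads the same way the DirectShow plugin splits PackPlanar. A hedged sketch of such a caller (the encoder-side threading itself is not shown in this diff):

    // Illustrative only -- divide the frame into even-aligned row bands and convert each.
    int bandHeight = (height/numThreads) & ~1;   // keep band starts on even rows
    for(int i=0; i<numThreads; i++)
    {
        int startY = i*bandHeight;
        int endY   = (i == numThreads-1) ? height : startY+bandHeight;
        Convert444to420(input, width, pitch, height, startY, endY, output, bSSE2Available);
    }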
|
@ -138,8 +138,6 @@ public:
{
strFile = lpFile;

EnableMemoryTracking(TRUE, 8);

if(!fileOut.Open(lpFile, XFILE_CREATEALWAYS, 1024*1024))
return false;

@ -61,8 +61,8 @@ extern ConfigFile *AppConfig;
extern OBS *App;
extern TCHAR lpAppDataPath[MAX_PATH];

#define OBS_VERSION 0x000421
#define OBS_VERSION_STRING_ANSI "Open Broadcaster Software v0.421a"
#define OBS_VERSION 0x000429
#define OBS_VERSION_STRING_ANSI "Open Broadcaster Software v0.43a [test version 1]"
#define OBS_VERSION_STRING TEXT(OBS_VERSION_STRING_ANSI)

#define OBS_WINDOW_CLASS TEXT("OBSWindowClass")

Source/OBS.cpp
@ -66,7 +66,7 @@ VideoFileStream* CreateMP4FileStream(CTSTR lpFile);
VideoFileStream* CreateFLVFileStream(CTSTR lpFile);
//VideoFileStream* CreateAVIFileStream(CTSTR lpFile);

void Convert444to420(LPBYTE input, int width, int pitch, int height, LPBYTE *output, bool bSSE2Available);
void Convert444to420(LPBYTE input, int width, int pitch, int height, int startY, int endY, LPBYTE *output, bool bSSE2Available);

void STDCALL SceneHotkey(DWORD hotkey, UPARAM param, bool bDown);

@ -410,6 +410,8 @@ public:
virtual void GetRenderFrameSize(UINT &width, UINT &height) const {App->GetRenderFrameSize(width, height);}
virtual void GetOutputSize(UINT &width, UINT &height) const {App->GetOutputSize(width, height);}

virtual UINT GetMaxFPS() const {return App->bRunning ? App->fps : AppConfig->GetInt(TEXT("Video"), TEXT("FPS"), 30);}

virtual CTSTR GetLanguage() const {return App->strLanguage;}

virtual CTSTR GetAppDataPath() const {return lpAppDataPath;}
@ -535,7 +537,7 @@ OBS::OBS()
int y = (fullscreenY/2)-(cy/2);

hwndMain = CreateWindowEx(WS_EX_CONTROLPARENT|WS_EX_WINDOWEDGE, OBS_WINDOW_CLASS, OBS_VERSION_STRING,
WS_OVERLAPPED | WS_THICKFRAME | WS_MINIMIZEBOX | WS_CAPTION | WS_SYSMENU,
WS_OVERLAPPED | WS_THICKFRAME | WS_MINIMIZEBOX | WS_CAPTION | WS_SYSMENU | WS_CLIPCHILDREN,
x, y, cx, cy, NULL, NULL, hinstMain, NULL);
if(!hwndMain)
CrashError(TEXT("Could not create main window"));
@ -866,7 +868,7 @@ OBS::~OBS()
pluginInfo.strFile.Clear();
}

DestroyWindow(hwndMain);
//DestroyWindow(hwndMain);

AppConfig->SetInt(TEXT("General"), TEXT("Width"), clientWidth);
AppConfig->SetInt(TEXT("General"), TEXT("Height"), clientHeight);
@ -1200,11 +1202,14 @@ void OBS::Start()
td.Usage = D3D10_USAGE_STAGING;
td.CPUAccessFlags = D3D10_CPU_ACCESS_READ;

HRESULT err = GetD3D()->CreateTexture2D(&td, NULL, &copyTexture);
if(FAILED(err))
for(UINT i=0; i<2; i++)
{
CrashError(TEXT("Unable to create copy texture"));
//todo - better error handling
HRESULT err = GetD3D()->CreateTexture2D(&td, NULL, &copyTextures[i]);
if(FAILED(err))
{
CrashError(TEXT("Unable to create copy texture"));
//todo - better error handling
}
}

//-------------------------------------------------------------
@ -1395,20 +1400,14 @@ void OBS::SetStatusBarData()
{
HWND hwndStatusBar = GetDlgItem(hwndMain, ID_STATUS);

String strInfo = GetMostImportantInfo();
SendMessage(hwndStatusBar, SB_SETTEXT, 0, (LPARAM)strInfo.Array());

String strDroppedFrames;
strDroppedFrames << Str("MainWindow.DroppedFrames") << TEXT(" ") << IntString(curFramesDropped);
SendMessage(hwndStatusBar, SB_SETTEXT, 1, (LPARAM)strDroppedFrames.Array());

String strCaptureFPS;
strCaptureFPS << TEXT("FPS: ") << IntString(captureFPS);
SendMessage(hwndStatusBar, SB_SETTEXT, 2, (LPARAM)strCaptureFPS.Array());

statusBarData.bytesPerSec = bytesPerSec;
statusBarData.strain = curStrain;
SendMessage(hwndStatusBar, WM_SETREDRAW, 0, 0);
SendMessage(hwndStatusBar, SB_SETTEXT, 0 | SBT_OWNERDRAW, NULL);
SendMessage(hwndStatusBar, SB_SETTEXT, 1 | SBT_OWNERDRAW, NULL);
SendMessage(hwndStatusBar, SB_SETTEXT, 2 | SBT_OWNERDRAW, NULL);
SendMessage(hwndStatusBar, SB_SETTEXT, 3 | SBT_OWNERDRAW, NULL);

SendMessage(hwndStatusBar, WM_SETREDRAW, 1, 0);
InvalidateRect(hwndStatusBar, NULL, FALSE);
}

void OBS::DrawStatusBar(DRAWITEMSTRUCT &dis)
@ -1416,49 +1415,86 @@ void OBS::DrawStatusBar(DRAWITEMSTRUCT &dis)
if(!App->bRunning)
return;

DWORD green = 0xFF, red = 0xFF;

if(statusBarData.strain > 50.0)
green = DWORD(((50.0-(statusBarData.strain-50.0))/50.0)*255.0);

double redStrain = statusBarData.strain/50.0;
if(redStrain > 1.0)
redStrain = 1.0;

red = DWORD(redStrain*255.0);

HDC hdcTemp = CreateCompatibleDC(dis.hDC);
HBITMAP hbmpTemp = CreateCompatibleBitmap(dis.hDC, dis.rcItem.right-dis.rcItem.left, dis.rcItem.bottom-dis.rcItem.top);
SelectObject(hdcTemp, hbmpTemp);

BitBlt(hdcTemp, 0, 0, dis.rcItem.right-dis.rcItem.left, dis.rcItem.bottom-dis.rcItem.top, dis.hDC, dis.rcItem.left, dis.rcItem.top, SRCCOPY);

SelectObject(hdcTemp, GetCurrentObject(dis.hDC, OBJ_FONT));

//--------------------------------

HBRUSH hColorBrush = CreateSolidBrush((green<<8)|red);

RECT rc = {0, 0, 20, 20};
FillRect(hdcTemp, &rc, hColorBrush);

DeleteObject(hColorBrush);

//--------------------------------
//HBRUSH hColorBrush = CreateSolidBrush((green<<8)|red);

RECT rc;
mcpy(&rc, &dis.rcItem, sizeof(rc));
rc.left += 22;

rc.left -= dis.rcItem.left;
rc.right -= dis.rcItem.left;
rc.top -= dis.rcItem.top;
rc.bottom -= dis.rcItem.top;

SetBkMode(hdcTemp, TRANSPARENT);
FillRect(hdcTemp, &rc, (HBRUSH)(COLOR_BTNFACE+1));

String strKBPS;
strKBPS << IntString((statusBarData.bytesPerSec*8) >> 10) << TEXT("kb/s");
DrawText(hdcTemp, strKBPS, strKBPS.Length(), &rc, DT_VCENTER|DT_SINGLELINE|DT_LEFT);
//DeleteObject(hColorBrush);

//--------------------------------

if(dis.itemID == 3)
{
DWORD green = 0xFF, red = 0xFF;

statusBarData.bytesPerSec = App->bytesPerSec;
statusBarData.strain = App->curStrain;
//statusBarData.strain = rand()%101;

if(statusBarData.strain > 50.0)
green = DWORD(((50.0-(statusBarData.strain-50.0))/50.0)*255.0);

double redStrain = statusBarData.strain/50.0;
if(redStrain > 1.0)
redStrain = 1.0;

red = DWORD(redStrain*255.0);

//--------------------------------

HBRUSH hColorBrush = CreateSolidBrush((green<<8)|red);

RECT rcBox = {0, 0, 20, 20};
/*rc.left += dis.rcItem.left;
rc.right += dis.rcItem.left;
rc.top += dis.rcItem.top;
rc.bottom += dis.rcItem.top;*/
FillRect(hdcTemp, &rcBox, hColorBrush);

DeleteObject(hColorBrush);

//--------------------------------

SetBkMode(hdcTemp, TRANSPARENT);

rc.left += 22;

String strKBPS;
strKBPS << IntString((statusBarData.bytesPerSec*8) >> 10) << TEXT("kb/s");
//strKBPS << IntString(rand()) << TEXT("kb/s");
DrawText(hdcTemp, strKBPS, strKBPS.Length(), &rc, DT_VCENTER|DT_SINGLELINE|DT_LEFT);
}
else
{
String strOutString;

switch(dis.itemID)
{
case 0: strOutString << App->GetMostImportantInfo(); break;
case 1: strOutString << Str("MainWindow.DroppedFrames") << TEXT(" ") << IntString(App->curFramesDropped); break;
case 2: strOutString << TEXT("FPS: ") << IntString(App->captureFPS); break;
}

if(strOutString.IsValid())
{
SetBkMode(hdcTemp, TRANSPARENT);
DrawText(hdcTemp, strOutString, strOutString.Length(), &rc, DT_VCENTER|DT_SINGLELINE|DT_LEFT);
}
}

//--------------------------------
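The rewrite above switches the status bar from plain SB_SETTEXT strings to owner-drawn parts, so the panes are painted on demand (which is how the strain indicator square gets its color). A minimal sketch of that Win32 pattern, independent of the OBS helpers; the part count and the drawing done in WM_DRAWITEM are placeholders:

#include <windows.h>
#include <commctrl.h>

// Mark every status-bar part as owner-drawn; the parent then gets WM_DRAWITEM per part.
void MakePartsOwnerDrawn(HWND hwndStatusBar, int numParts)
{
    SendMessage(hwndStatusBar, WM_SETREDRAW, FALSE, 0);
    for(int i = 0; i < numParts; i++)
        SendMessage(hwndStatusBar, SB_SETTEXT, i | SBT_OWNERDRAW, 0);
    SendMessage(hwndStatusBar, WM_SETREDRAW, TRUE, 0);
    InvalidateRect(hwndStatusBar, NULL, FALSE);
}

// In the parent window procedure (illustrative):
//   case WM_DRAWITEM:
//   {
//       DRAWITEMSTRUCT *dis = (DRAWITEMSTRUCT*)lParam;
//       // dis->itemID = part index, dis->rcItem = its rectangle, dis->hDC = target DC
//       FillRect(dis->hDC, &dis->rcItem, (HBRUSH)(COLOR_BTNFACE+1));
//       DrawText(dis->hDC, TEXT("example"), -1, &dis->rcItem, DT_VCENTER|DT_SINGLELINE|DT_LEFT);
//       return TRUE;
//   }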
@ -1558,7 +1594,10 @@ void OBS::Stop()
yuvRenderTextures[i] = NULL;
}

SafeRelease(copyTexture);
for(UINT i=0; i<2; i++)
{
SafeRelease(copyTextures[i]);
}

delete transitionTexture;
transitionTexture = NULL;
@ -1685,11 +1724,25 @@ DWORD STDCALL OBS::MainAudioThread(LPVOID lpUnused)
return 0;
}

struct Convert444Data
{
LPBYTE input;
LPBYTE output[3];
int width, height, pitch, startY, endY;
};

DWORD STDCALL Convert444Thread(Convert444Data *data)
{
SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);
Convert444to420(data->input, data->width, data->pitch, data->height, data->startY, data->endY, data->output, App->SSE2Available());
return 0;
}
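Each Convert444Thread handles one strip; the capture loop further down splits the output height across the available processors, keeping every boundary on an even row. A condensed, self-contained sketch of that partitioning and dispatch is below, using plain Win32 threads in place of the OSCreateThread/WaitForMultipleObjects helpers; StripJob and StripWorker are illustrative stand-ins, and the real loop overlaps the conversion with the next frame instead of waiting immediately as this sketch does.

#include <windows.h>
#include <vector>

struct StripJob { int startY, endY; /* plus input/output pointers in the real code */ };

DWORD WINAPI StripWorker(LPVOID param)
{
    StripJob *job = (StripJob*)param;
    // call the strip converter for rows [job->startY, job->endY) here
    (void)job;
    return 0;
}

// Split 'height' rows across 'workers' threads, aligning every boundary to an even row.
void ConvertInParallel(int height, unsigned workers)
{
    std::vector<StripJob> jobs(workers);
    std::vector<HANDLE>   threads(workers);

    for(unsigned i = 0; i < workers; i++)
    {
        jobs[i].startY = (i == 0) ? 0 : jobs[i-1].endY;
        jobs[i].endY   = (i == workers-1) ? height
                                          : (int)(((unsigned)height / workers) * (i + 1) & ~1u); // force even, like & 0xFFFFFFFE
        threads[i] = CreateThread(NULL, 0, StripWorker, &jobs[i], 0, NULL);
    }

    WaitForMultipleObjects(workers, threads.data(), TRUE, INFINITE);
    for(unsigned i = 0; i < workers; i++)
        CloseHandle(threads[i]);
}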
void OBS::MainCaptureLoop()
{
traceIn(OBS::MainCaptureLoop);

int curRenderTarget = 0, curCopyTexture = 0;
int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
int copyWait = NUM_RENDER_BUFFERS-1;
UINT curStreamTime = 0, firstFrameTime = OSGetTime(), lastStreamTime = 0;
UINT lastPTSVal = 0, lastUnmodifiedPTSVal = 0;
@ -1707,16 +1760,23 @@ void OBS::MainCaptureLoop()

LPVOID nullBuff = NULL;

x264_picture_t picOut;
x264_picture_init(&picOut);
x264_picture_t outPics[2];
x264_picture_init(&outPics[0]);
x264_picture_init(&outPics[1]);

if(bUsing444)
{
picOut.img.i_csp = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
picOut.img.i_plane = 1;
outPics[0].img.i_csp = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
outPics[0].img.i_plane = 1;

outPics[1].img.i_csp = X264_CSP_BGRA;
outPics[1].img.i_plane = 1;
}
else
x264_picture_alloc(&picOut, X264_CSP_I420, outputCX, outputCY);
{
x264_picture_alloc(&outPics[0], X264_CSP_I420, outputCX, outputCY);
x264_picture_alloc(&outPics[1], X264_CSP_I420, outputCX, outputCY);
}

int curPTS = 0;

@ -1736,6 +1796,29 @@ void OBS::MainCaptureLoop()
float bpsTime = 0.0f;
double lastStrain = 0.0f;

UINT numProcessors = OSGetProcessorCount();
HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numProcessors);
Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numProcessors);

zero(h420Threads, sizeof(HANDLE)*numProcessors);
zero(convertInfo, sizeof(Convert444Data)*numProcessors);

for(UINT i=0; i<numProcessors; i++)
{
convertInfo[i].width = outputCX;
convertInfo[i].height = outputCY;

if(i == 0)
convertInfo[i].startY = 0;
else
convertInfo[i].startY = convertInfo[i-1].endY;

if(i == (numProcessors-1))
convertInfo[i].endY = outputCY;
else
convertInfo[i].endY = ((outputCY/numProcessors)*(i+1)) & 0xFFFFFFFE;
}

DWORD fpsTimeNumerator = 1000-(frameTime*fps);
DWORD fpsTimeDenominator = fps;
DWORD fpsTimeAdjust = 0;
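fpsTimeNumerator and fpsTimeDenominator capture the leftover milliseconds when 1000 is not evenly divisible by the FPS (for example 1000 - 33*30 = 10 at 30 fps). This hunk does not show how fpsTimeAdjust is consumed; the natural reading is an error accumulator that stretches some frames by one millisecond so the calculated timestamps average out to exactly the target FPS. The sketch below is a guess at that logic, for illustration only, with names mirroring the diff:

#include <windows.h>

// Hypothetical accumulator: spread the remainder of 1000/fps across the second
// so that 'fps' frame durations sum to exactly 1000 ms.
struct FrameTimer
{
    DWORD frameTime, fpsTimeNumerator, fpsTimeDenominator, fpsTimeAdjust;

    explicit FrameTimer(DWORD fps)
        : frameTime(1000/fps),
          fpsTimeNumerator(1000 - (1000/fps)*fps),
          fpsTimeDenominator(fps),
          fpsTimeAdjust(0) {}

    DWORD NextFrameDuration()      // at 30 fps: twenty 33 ms frames, ten 34 ms frames
    {
        DWORD duration = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust >= fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            duration++;            // stretch this frame by 1 ms
        }
        return duration;           // would feed curPTSVal = lastPTSVal + frameTimeAdjust
    }
};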
@ -1743,6 +1826,8 @@ void OBS::MainCaptureLoop()
DWORD fpsCounter = 0;

bool bFirstFrame = true;
bool bFirst420Encode = true;
bool bUseThreaded420 = OSGetProcessorCount() > 1 && !bUsing444;

while(bRunning)
{
@ -1999,36 +2084,75 @@ void OBS::MainCaptureLoop()

bFirstFrame = false;
}
}

if(!bEncode)
{
if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
curCopyTexture = 0;
else
curCopyTexture++;
if(!bEncode)
{
if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
curYUVTexture = 0;
else
curYUVTexture++;
}
}
}

if(bEncode)
{
UINT prevCopyTexture = (curCopyTexture+1) & 1;

ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
profileIn("CopyResource");
D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curCopyTexture]);

if(!bFirst420Encode && bUseThreaded420)
{
WaitForMultipleObjects(numProcessors, h420Threads, TRUE, INFINITE);
for(UINT i=0; i<numProcessors; i++)
{
CloseHandle(h420Threads[i]);
h420Threads[i] = NULL;
}
copyTexture->Unmap(0);
}

D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
profileOut;

ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

D3D10_MAPPED_TEXTURE2D map;
if(SUCCEEDED(copyTexture->Map(0, D3D10_MAP_READ, 0, &map)))
if(SUCCEEDED(prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
{
List<DataPacket> videoPackets;
List<PacketType> videoPacketTypes;

x264_picture_t &picOut = outPics[prevCopyTexture];

if(!bUsing444)
{
profileIn("conversion to 4:2:0");

Convert444to420((LPBYTE)map.pData, outputCX, map.RowPitch, outputCY, picOut.img.plane, SSE2Available());
copyTexture->Unmap(0);
if(bUseThreaded420)
{
x264_picture_t &newPicOut = outPics[curCopyTexture];

for(UINT i=0; i<numProcessors; i++)
{
convertInfo[i].input = (LPBYTE)map.pData;
convertInfo[i].pitch = map.RowPitch;
convertInfo[i].output[0] = newPicOut.img.plane[0];
convertInfo[i].output[1] = newPicOut.img.plane[1];
convertInfo[i].output[2] = newPicOut.img.plane[2];
h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, (LPVOID)(convertInfo+i));
}
}
else
{
Convert444to420((LPBYTE)map.pData, outputCX, map.RowPitch, outputCY, 0, outputCY, picOut.img.plane, SSE2Available());
prevTexture->Unmap(0);
}

if(bFirst420Encode)
bFirst420Encode = bEncode = false;

profileOut;
}
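This block is the heart of the pipelining change: the frame copied into a staging texture on iteration N is only mapped and encoded on iteration N+1, so Map() never stalls waiting for a copy the GPU has not finished. A minimal sketch of that ping-pong follows; the device, the two staging textures and the GPU source texture are assumed to exist and to be format-compatible.

#include <d3d10.h>

// Sketch: alternate between two staging textures so Map() reads the frame copied
// on the previous iteration instead of blocking on this frame's copy.
void CopyAndEncodeFrame(ID3D10Device *device, ID3D10Texture2D *staging[2],
                        ID3D10Texture2D *source, UINT &curTex)
{
    UINT prevTex = (curTex + 1) & 1;
    device->CopyResource(staging[curTex], source);    // queue this frame's copy

    D3D10_MAPPED_TEXTURE2D map;
    if(SUCCEEDED(staging[prevTex]->Map(0, D3D10_MAP_READ, 0, &map)))
    {
        // ... convert/encode from map.pData using map.RowPitch ...
        staging[prevTex]->Unmap(0);
    }

    curTex = prevTex;    // swap roles for the next frame
}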
@ -2038,127 +2162,132 @@ void OBS::MainCaptureLoop()
picOut.img.plane[0] = (uint8_t*)map.pData;
}

//------------------------------------
// get timestamps

DWORD curTimeStamp = 0;
DWORD curPTSVal = 0;

curTimeStamp = bufferedTimes[0];
curPTSVal = bufferedTimes[curPTS++];

if(bUseSyncFix)
if(bEncode)
{
DWORD savedPTSVal = curPTSVal;
//------------------------------------
// get timestamps

if(curPTSVal != 0)
DWORD curTimeStamp = 0;
DWORD curPTSVal = 0;

curTimeStamp = bufferedTimes[0];
curPTSVal = bufferedTimes[curPTS++];

if(bUseSyncFix)
{
/*int toleranceVal = int(lastPTSVal+frameTime);
int toleranceOffset = (int(curPTSVal)-toleranceVal);
int halfFrameTime = int(frameTime/2);
DWORD savedPTSVal = curPTSVal;

if(toleranceOffset > halfFrameTime)
curPTSVal = DWORD(toleranceVal+(toleranceOffset-halfFrameTime));
else if(toleranceOffset < -halfFrameTime)
curPTSVal = DWORD(toleranceVal+(toleranceOffset+halfFrameTime));
else
curPTSVal = DWORD(toleranceVal);*/

//this turned out to be much better than the previous way I was doing it.
//if the FPS is set to about the same as the capture FPS, this works pretty much flawlessly.
//100% calculated timestamps that are almost fully accurate with no CPU timers involved,
//while still fully allowing any potential unexpected frame variability.
curPTSVal = lastPTSVal+frameTimeAdjust;
if(curPTSVal < lastUnmodifiedPTSVal)
curPTSVal = lastUnmodifiedPTSVal;

bufferedTimes[curPTS-1] = curPTSVal;
}

lastUnmodifiedPTSVal = savedPTSVal;
lastPTSVal = curPTSVal;

//Log(TEXT("val: %u - adjusted: %u"), savedPTSVal, curPTSVal);
}

picOut.i_pts = curPTSVal;

//------------------------------------
// encode

profileIn("call to encoder");

videoEncoder->Encode(&picOut, videoPackets, videoPacketTypes, curTimeStamp);
if(bUsing444) copyTexture->Unmap(0);

profileOut;

//------------------------------------
// upload

bool bSendingVideo = videoPackets.Num() > 0;

//send headers before the first frame if not yet sent
if(bSendingVideo)
{
if(!bSentHeaders)
{
network->BeginPublishing();
bSentHeaders = true;

DataPacket seiPacket;
videoEncoder->GetSEI(seiPacket);

network->SendPacket(seiPacket.lpPacket, seiPacket.size, 0, PacketType_VideoHighest);
}

OSEnterMutex(hSoundDataMutex);

if(pendingAudioFrames.Num())
{
//Log(TEXT("pending frames %u, (in milliseconds): %u"), pendingAudioFrames.Num(), pendingAudioFrames.Last().timestamp-pendingAudioFrames[0].timestamp);
while(pendingAudioFrames.Num() && pendingAudioFrames[0].timestamp < curTimeStamp)
if(curPTSVal != 0)
{
List<BYTE> &audioData = pendingAudioFrames[0].audioData;
/*int toleranceVal = int(lastPTSVal+frameTime);
int toleranceOffset = (int(curPTSVal)-toleranceVal);
int halfFrameTime = int(frameTime/2);

if(audioData.Num())
{
network->SendPacket(audioData.Array(), audioData.Num(), pendingAudioFrames[0].timestamp, PacketType_Audio);
if(fileStream)
fileStream->AddPacket(audioData.Array(), audioData.Num(), pendingAudioFrames[0].timestamp, PacketType_Audio);
if(toleranceOffset > halfFrameTime)
curPTSVal = DWORD(toleranceVal+(toleranceOffset-halfFrameTime));
else if(toleranceOffset < -halfFrameTime)
curPTSVal = DWORD(toleranceVal+(toleranceOffset+halfFrameTime));
else
curPTSVal = DWORD(toleranceVal);*/

audioData.Clear();
}
//this turned out to be much better than the previous way I was doing it.
//if the FPS is set to about the same as the capture FPS, this works pretty much flawlessly.
//100% calculated timestamps that are almost fully accurate with no CPU timers involved,
//while still fully allowing any potential unexpected frame variability.
curPTSVal = lastPTSVal+frameTimeAdjust;
if(curPTSVal < lastUnmodifiedPTSVal)
curPTSVal = lastUnmodifiedPTSVal;

//Log(TEXT("audio packet timestamp: %u"), pendingAudioFrames[0].timestamp);

pendingAudioFrames[0].audioData.Clear();
pendingAudioFrames.Remove(0);
bufferedTimes[curPTS-1] = curPTSVal;
}

lastUnmodifiedPTSVal = savedPTSVal;
lastPTSVal = curPTSVal;

//Log(TEXT("val: %u - adjusted: %u"), savedPTSVal, curPTSVal);
}

OSLeaveMutex(hSoundDataMutex);
picOut.i_pts = curPTSVal;

for(UINT i=0; i<videoPackets.Num(); i++)
//------------------------------------
// encode

profileIn("call to encoder");

videoEncoder->Encode(&picOut, videoPackets, videoPacketTypes, curTimeStamp);
if(bUsing444) prevTexture->Unmap(0);

profileOut;

//------------------------------------
// upload

bool bSendingVideo = videoPackets.Num() > 0;

//send headers before the first frame if not yet sent
if(bSendingVideo)
{
DataPacket &packet = videoPackets[i];
PacketType type = videoPacketTypes[i];
if(!bSentHeaders)
{
network->BeginPublishing();
bSentHeaders = true;

network->SendPacket(packet.lpPacket, packet.size, curTimeStamp, type);
if(fileStream)
fileStream->AddPacket(packet.lpPacket, packet.size, curTimeStamp, type);
DataPacket seiPacket;
videoEncoder->GetSEI(seiPacket);

network->SendPacket(seiPacket.lpPacket, seiPacket.size, 0, PacketType_VideoHighest);
}

OSEnterMutex(hSoundDataMutex);

if(pendingAudioFrames.Num())
{
//Log(TEXT("pending frames %u, (in milliseconds): %u"), pendingAudioFrames.Num(), pendingAudioFrames.Last().timestamp-pendingAudioFrames[0].timestamp);
while(pendingAudioFrames.Num() && pendingAudioFrames[0].timestamp < curTimeStamp)
{
List<BYTE> &audioData = pendingAudioFrames[0].audioData;

if(audioData.Num())
{
network->SendPacket(audioData.Array(), audioData.Num(), pendingAudioFrames[0].timestamp, PacketType_Audio);
if(fileStream)
fileStream->AddPacket(audioData.Array(), audioData.Num(), pendingAudioFrames[0].timestamp, PacketType_Audio);

audioData.Clear();
}

//Log(TEXT("audio packet timestamp: %u"), pendingAudioFrames[0].timestamp);

pendingAudioFrames[0].audioData.Clear();
pendingAudioFrames.Remove(0);
}
}

OSLeaveMutex(hSoundDataMutex);

for(UINT i=0; i<videoPackets.Num(); i++)
{
DataPacket &packet = videoPackets[i];
PacketType type = videoPacketTypes[i];

network->SendPacket(packet.lpPacket, packet.size, curTimeStamp, type);
if(fileStream)
fileStream->AddPacket(packet.lpPacket, packet.size, curTimeStamp, type);
}

curPTS--;

bufferedTimes.Remove(0);
}

curPTS--;

bufferedTimes.Remove(0);
}
}

if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
curCopyTexture = 0;
curCopyTexture = prevCopyTexture;

if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
curYUVTexture = 0;
else
curCopyTexture++;
curYUVTexture++;
}

lastRenderTarget = curRenderTarget;
@ -2201,7 +2330,26 @@ void OBS::MainCaptureLoop()
}

if(!bUsing444)
x264_picture_clean(&picOut);
{
if(!bFirst420Encode && bUseThreaded420)
{
WaitForMultipleObjects(numProcessors, h420Threads, TRUE, INFINITE);
for(UINT i=0; i<numProcessors; i++)
{
if(h420Threads)
CloseHandle(h420Threads[i]);
h420Threads[i] = NULL;
}

ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
copyTexture->Unmap(0);
}
x264_picture_clean(&outPics[0]);
x264_picture_clean(&outPics[1]);
}

Free(h420Threads);
Free(convertInfo);

Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
@ -329,7 +329,7 @@ class OBS
//---------------------------------------------------
// graphics stuff

ID3D10Texture2D *copyTexture;
ID3D10Texture2D *copyTextures[2];

Texture *mainRenderTextures[NUM_RENDER_BUFFERS];
Texture *yuvRenderTextures[NUM_RENDER_BUFFERS];

@ -42,7 +42,7 @@ struct NetworkPacket
};

//max latency in milliseconds allowed when using the send buffer
const DWORD maxBufferTime = 500;
const DWORD maxBufferTime = 600;

class RTMPPublisher : public NetworkStream
{
@ -57,7 +57,7 @@ class RTMPPublisher : public NetworkStream
int packetWaitType;
List<NetworkPacket> Packets;
UINT numVideoPackets;
UINT maxVideoPackets, BFrameThreshold;
UINT maxVideoPackets, BFrameThreshold, revertThreshold;

QWORD bytesSent;

@ -67,12 +67,14 @@ class RTMPPublisher : public NetworkStream
UINT numVideoPacketsBuffered;
DWORD firstBufferedVideoFrameTimestamp;

bool bPacketDumpMode;

BOOL bUseSendBuffer;

//all data sending is done in yet another separate thread to prevent blocking in the main capture thread
void SendLoop()
{
traceIn(RTMPPublisher::SendLoop);
//traceIn(RTMPPublisher::SendLoop);

while(WaitForSingleObject(hSendSempahore, INFINITE) == WAIT_OBJECT_0 && !bStopping && RTMP_IsConnected(rtmp))
{
@ -168,7 +170,7 @@ class RTMPPublisher : public NetworkStream
bytesSent += packetData.Num();
}

traceOut;
//traceOut;
}

static DWORD SendThread(RTMPPublisher *publisher)
@ -289,7 +291,7 @@ class RTMPPublisher : public NetworkStream
{
if(packet.type >= packetWaitType)
{
packetWaitType = PacketType_VideoDisposable;
packetWaitType = (bPacketDumpMode) ? PacketType_VideoLow : PacketType_VideoDisposable;
break;
}
else //clear out following dependent packets of lower priority
@ -353,30 +355,26 @@ class RTMPPublisher : public NetworkStream

static int BufferedSend(RTMPSockBuf *sb, const char *buf, int len, RTMPPublisher *network)
{
bool bComplete = false;
int fullLen = len;
bool bSentData = false;

int newTotal = network->curSendBufferLen+len;

//would exceed buffer size, send instead.
if(newTotal >= int(network->sendBuffer.Num()))
do
{
int nBytes;
//flush existing packets
nBytes = network->FlushSendBuffer();
if(nBytes < 0)
return nBytes;
if(nBytes == 0)
return 0;
int newTotal = network->curSendBufferLen+len;

//if packet is bigger than the send buffer, just send it straight up instead of buffering
if((UINT)len > network->sendBuffer.Num())
//buffer full, send
if(newTotal >= int(network->sendBuffer.Num()))
{
const char *lpTemp = buf;
int totalBytesSent = len;
int pendingBytes = newTotal-network->sendBuffer.Num();
int copyCount = network->sendBuffer.Num()-network->curSendBufferLen;

mcpy(network->sendBuffer.Array()+network->curSendBufferLen, buf, copyCount);

BYTE *lpTemp = network->sendBuffer.Array();
int totalBytesSent = network->sendBuffer.Num();
while(totalBytesSent > 0)
{
nBytes = send(sb->sb_socket, lpTemp, totalBytesSent, 0);
int nBytes = send(sb->sb_socket, (const char*)lpTemp, totalBytesSent, 0);
if(nBytes < 0)
return nBytes;
if(nBytes == 0)
@ -386,17 +384,27 @@ class RTMPPublisher : public NetworkStream
lpTemp += nBytes;
}

len = 0;
network->curSendBufferLen = 0;

if(pendingBytes)
{
buf += copyCount;
len -= copyCount;
}
else
bComplete = true;
}
else
{
if(len)
{
mcpy(network->sendBuffer.Array()+network->curSendBufferLen, buf, len);
network->curSendBufferLen = newTotal;
}

network->numVideoPacketsBuffered = 0;
}

if(len > 0)
{
mcpy(network->sendBuffer.Array()+network->curSendBufferLen, buf, len);
network->curSendBufferLen += len;
}
bComplete = true;
}
} while(!bComplete);

return fullLen;
}
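The rewritten BufferedSend fills the send buffer to capacity, flushes it with blocking send() calls, and loops until the whole packet has been consumed, instead of eagerly flushing whenever the buffer would overflow. A generic, self-contained sketch of that pattern using plain Winsock (no OBS types; the buffer and its fill level are passed in by the caller):

#include <winsock2.h>
#include <vector>
#include <cstring>

// Append to a fixed-size buffer; when it would overflow, push the whole buffer to
// the socket, then keep going until all of 'len' has been consumed or buffered.
int BufferedSendSketch(SOCKET s, const char *buf, int len,
                       std::vector<char> &sendBuf, size_t &used)
{
    const int fullLen = len;

    for(;;)
    {
        size_t newTotal = used + (size_t)len;

        if(newTotal >= sendBuf.size())          // buffer would overflow: flush it
        {
            size_t copyCount = sendBuf.size() - used;
            memcpy(sendBuf.data() + used, buf, copyCount);

            const char *p = sendBuf.data();
            int remaining = (int)sendBuf.size();
            while(remaining > 0)
            {
                int n = send(s, p, remaining, 0);
                if(n <= 0) return n;            // error or closed; caller handles it
                remaining -= n;
                p += n;
            }

            used = 0;
            buf += copyCount;
            len -= (int)copyCount;
            if(len == 0) return fullLen;        // everything went out with the flush
        }
        else                                    // fits: just buffer it for later
        {
            memcpy(sendBuf.data() + used, buf, (size_t)len);
            used = newTotal;
            return fullLen;
        }
    }
}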
@ -404,7 +412,7 @@ class RTMPPublisher : public NetworkStream
public:
RTMPPublisher(RTMP *rtmpIn, BOOL bUseSendBuffer, UINT sendBufferSize)
{
traceIn(RTMPPublisher::RTMPPublisher);
//traceIn(RTMPPublisher::RTMPPublisher);

rtmp = rtmpIn;

@ -440,13 +448,14 @@ public:

BFrameThreshold = 30; //when it starts cutting out b frames
maxVideoPackets = 70; //when it starts cutting out p frames
revertThreshold = 2; //when it reverts to normal

traceOut;
//traceOut;
}

~RTMPPublisher()
{
traceIn(RTMPPublisher::~RTMPPublisher);
//traceIn(RTMPPublisher::~RTMPPublisher);

bStopping = true;
ReleaseSemaphore(hSendSempahore, 1, NULL);
@ -475,12 +484,12 @@ public:
RTMP_Free(rtmp);
}

traceOut;
//traceOut;
}

void SendPacket(BYTE *data, UINT size, DWORD timestamp, PacketType type)
{
traceIn(RTMPPublisher::SendPacket);
//traceIn(RTMPPublisher::SendPacket);

if(!bStopping)
{
@ -488,18 +497,25 @@ public:
paddedData.SetSize(size+RTMP_MAX_HEADER_SIZE);
mcpy(paddedData.Array()+RTMP_MAX_HEADER_SIZE, data, size);

if(bPacketDumpMode && Packets.Num() <= revertThreshold)
bPacketDumpMode = false;

bool bAddPacket = false;
if(type >= packetWaitType)
{
if(type != PacketType_Audio)
{
packetWaitType = PacketType_VideoDisposable;
packetWaitType = (bPacketDumpMode) ? PacketType_VideoLow : PacketType_VideoDisposable;

if(type <= PacketType_VideoHigh)
numVideoPackets++;
}

//-----------------
bAddPacket = true;
}

if(bAddPacket)
{
OSEnterMutex(hDataMutex);

NetworkPacket &netPacket = *Packets.CreateNew();
@ -509,7 +525,16 @@ public:

//begin dumping b frames if there's signs of lag
if(numVideoPackets > BFrameThreshold)
{
if(!bPacketDumpMode)
{
bPacketDumpMode = true;
if(packetWaitType == PacketType_VideoDisposable)
packetWaitType = PacketType_VideoLow;
}

DumpBFrame();
}

//begin dumping p frames if b frames aren't enough
if(numVideoPackets > maxVideoPackets)
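The bPacketDumpMode / revertThreshold additions turn the frame dropping into a small state machine: once the queue grows past BFrameThreshold it starts discarding disposable B-frames, past maxVideoPackets it drops lower-priority frames as well, and when the backlog drains to revertThreshold it reverts to sending everything. The standalone model below captures that policy with hypothetical names; the real code works on its Packets list and the DumpBFrame() helper rather than a counter.

// Hypothetical model of the dump-mode policy (C++11); thresholds mirror the diff.
enum PacketPriority { Disposable, Low, High, Highest };

struct DumpPolicy
{
    unsigned queued = 0;         // video packets waiting to be sent (maintained by the caller)
    bool     dumpMode = false;

    static constexpr unsigned BFrameThreshold = 30;  // start dropping disposable B-frames
    static constexpr unsigned MaxVideoPackets = 70;  // start dropping low-priority frames too
    static constexpr unsigned RevertThreshold = 2;   // backlog drained: stop dropping

    // Returns true if a packet of this priority should still be queued.
    bool ShouldQueue(PacketPriority p)
    {
        if(dumpMode && queued <= RevertThreshold)
            dumpMode = false;

        if(queued > BFrameThreshold)
            dumpMode = true;

        PacketPriority minAccepted = Disposable;
        if(dumpMode)                 minAccepted = Low;
        if(queued > MaxVideoPackets) minAccepted = High;

        return p >= minAccepted;
    }
};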
@ -546,12 +571,12 @@ public:
}*/
}

traceOut;
//traceOut;
}

void BeginPublishing()
{
traceIn(RTMPPublisher::BeginPublishing);
//traceIn(RTMPPublisher::BeginPublishing);

RTMPPacket packet;

@ -619,7 +644,7 @@ public:
return;
}

traceOut;
//traceOut;
}

double GetPacketStrain() const
@ -640,7 +665,7 @@ public:

NetworkStream* CreateRTMPPublisher(String &failReason, bool &bCanRetry)
{
traceIn(CreateRTMPPublisher);
//traceIn(CreateRTMPPublisher);

//------------------------------------------------------
// set up URL
@ -763,5 +788,5 @@ NetworkStream* CreateRTMPPublisher(String &failReason, bool &bCanRetry)

return new RTMPPublisher(rtmp, bUseSendBuffer, sendBufferSize);

traceOut;
//traceOut;
}

@ -258,6 +258,8 @@ LRESULT CALLBACK OBS::ListboxHook(HWND hwnd, UINT message, WPARAM wParam, LPARAM
{
if(message == WM_RBUTTONDOWN)
{
CallWindowProc(listboxProc, hwnd, WM_LBUTTONDOWN, wParam, lParam);

UINT id = (UINT)GetWindowLongPtr(hwnd, GWL_ID);

HMENU hMenu = CreatePopupMenu();
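The ListboxHook change forwards the right-click as a left-click first, so the item under the cursor is selected before the context menu appears. The rest of the handler is cut off in this hunk, but the usual Win32 shape of that pattern looks roughly like this; the menu ID, menu text and helper name are placeholders:

#include <windows.h>

#define ID_EXAMPLE_ITEM 1001   // placeholder command ID

// Called from a subclassed listbox proc on WM_RBUTTONDOWN, after forwarding
// WM_LBUTTONDOWN so the clicked item is already selected.
void ShowListboxContextMenu(HWND hwndList)
{
    HMENU hMenu = CreatePopupMenu();
    AppendMenu(hMenu, MF_STRING, ID_EXAMPLE_ITEM, TEXT("Example action"));

    POINT pt;
    GetCursorPos(&pt);   // TrackPopupMenu expects screen coordinates

    // TPM_RETURNCMD makes the call return the chosen command instead of posting WM_COMMAND.
    UINT cmd = TrackPopupMenu(hMenu, TPM_RETURNCMD | TPM_RIGHTBUTTON,
                              pt.x, pt.y, 0, GetParent(hwndList), NULL);
    if(cmd == ID_EXAMPLE_ITEM)
    {
        // act on the currently selected item
        int sel = (int)SendMessage(hwndList, LB_GETCURSEL, 0, 0);
        (void)sel;
    }

    DestroyMenu(hMenu);
}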