obs-outputs: Use INVALID_SOCKET instead of -1
On Windows, for whatever reason, sockets use the SOCKET type, which is not a signed integer. Even though it's not a signed integer, -1 is still used to indicate an invalid socket, but the way you're supposed to spell that is Microsoft's fabulously dumb little INVALID_SOCKET define, so we have to make librtmp use that instead.
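For context, here is a minimal sketch of the portability pattern this change relies on. The open_tcp_socket() helper and its surrounding defines are made up for illustration and are not librtmp code: on Winsock, SOCKET is an unsigned integer type whose error value is INVALID_SOCKET, while on POSIX a socket is a plain int and socket() returns -1 on failure.

/* Illustrative sketch only; open_tcp_socket() is a hypothetical helper name. */
#ifdef _WIN32
#include <winsock2.h>            /* provides SOCKET (unsigned) and INVALID_SOCKET */
#else
#include <sys/socket.h>
#include <netinet/in.h>
#define SOCKET int               /* POSIX sockets are plain signed descriptors */
#ifndef INVALID_SOCKET
#define INVALID_SOCKET -1        /* socket() reports failure as -1 on POSIX */
#endif
#endif

static SOCKET open_tcp_socket(void)
{
    SOCKET s = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);

    /* INVALID_SOCKET is the right spelling on both platforms; a bare -1 relies
       on implicit signed-to-unsigned conversion against Winsock's unsigned
       SOCKET and can draw sign-compare warnings. */
    if (s == INVALID_SOCKET)
        return INVALID_SOCKET;

    return s;
}

With a fallback define like the #ifndef block above, the same != INVALID_SOCKET comparisons compile unchanged on Windows and non-Windows builds, which is what the rtmp_sys.h hunk at the end of the diff below provides.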
@@ -131,7 +131,7 @@ HTTP_get(struct HTTP_ctx *http, const char *url, HTTP_read_callback *cb)
     }
     sa.sin_port = htons(port);
     sb.sb_socket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
-    if (sb.sb_socket == -1)
+    if (sb.sb_socket == INVALID_SOCKET)
        return HTTPRES_LOST_CONNECTION;
     i =
        sprintf(sb.sb_buf,
@@ -400,7 +400,7 @@ RTMP_GetDuration(RTMP *r)
 int
 RTMP_IsConnected(RTMP *r)
 {
-    return r->m_sb.sb_socket != -1;
+    return r->m_sb.sb_socket != INVALID_SOCKET;
 }
 
 SOCKET
@@ -735,7 +735,7 @@ RTMP_Connect0(RTMP *r, struct sockaddr * service)
     r->m_sb.sb_socket = socket(service->sa_family, SOCK_STREAM, IPPROTO_TCP);
 #endif
 
-    if (r->m_sb.sb_socket != -1)
+    if (r->m_sb.sb_socket != INVALID_SOCKET)
     {
        if(r->m_bindIP.addrLen)
        {
@@ -4328,7 +4328,7 @@ RTMPSockBuf_Close(RTMPSockBuf *sb)
        sb->sb_ssl = NULL;
     }
 #endif
-    if (sb->sb_socket != -1)
+    if (sb->sb_socket != INVALID_SOCKET)
        return closesocket(sb->sb_socket);
     return 0;
 }
@@ -73,6 +73,9 @@
 #define closesocket(s) close(s)
 #define msleep(n) usleep(n*1000)
 #define SET_RCVTIMEO(tv,s) struct timeval tv = {s,0}
+#ifndef INVALID_SOCKET
+#define INVALID_SOCKET -1
+#endif
 #endif
 
 #include "rtmp.h"