Fix some undefined behaviour

- redisSSLContextError must always be initialized at definition,
  otherwise when the SSL context is created successfully no value is
  ever assigned to it. The uninitialized garbage then left in the
  variable may signal a misleading error.
master
jengab 2021-06-08 08:34:23 +02:00 committed by michael-grunder
parent 507a6dcaa5
commit 0ed6cdec35
2 changed files with 4 additions and 5 deletions
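
For illustration, a minimal self-contained sketch of the pattern this fix establishes. The hiredis calls used here (redisInitOpenSSL, redisCreateSSLContext, redisSSLContextGetError, redisInitiateSSLWithContext) are the library's real API; the certificate file names and the 127.0.0.1:6379 endpoint are placeholder assumptions, not taken from this commit.

    #include <stdio.h>
    #include <stdlib.h>

    #include <hiredis/hiredis.h>
    #include <hiredis/hiredis_ssl.h>

    int main(void) {
        /* The fix: start from an explicit "no error" state, because
         * redisCreateSSLContext() does not assign the out-parameter on
         * every path, e.g. when context creation succeeds. */
        redisSSLContextError ssl_error = REDIS_SSL_CTX_NONE;
        redisSSLContext *ssl;
        redisContext *c;

        redisInitOpenSSL();

        /* Placeholder file names, for illustration only. */
        ssl = redisCreateSSLContext("ca.crt", NULL, "client.crt",
                                    "client.key", NULL, &ssl_error);
        if (!ssl || ssl_error != REDIS_SSL_CTX_NONE) {
            printf("SSL Context error: %s\n",
                   redisSSLContextGetError(ssl_error));
            exit(1);
        }

        c = redisConnect("127.0.0.1", 6379); /* placeholder endpoint */
        if (!c || c->err) {
            printf("Connection error: %s\n",
                   c ? c->errstr : "can't allocate redis context");
            exit(1);
        }

        if (redisInitiateSSLWithContext(c, ssl) != REDIS_OK) {
            printf("SSL error: %s\n", c->errstr);
            exit(1);
        }

        redisFree(c);
        redisFreeSSLContext(ssl);
        return 0;
    }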


@@ -56,7 +56,7 @@ int main (int argc, char **argv) {
     const char *caCert = argc > 5 ? argv[6] : NULL;
     redisSSLContext *ssl;
-    redisSSLContextError ssl_error;
+    redisSSLContextError ssl_error = REDIS_SSL_CTX_NONE;
     redisInitOpenSSL();


@@ -12,7 +12,7 @@
 int main(int argc, char **argv) {
     unsigned int j;
     redisSSLContext *ssl;
-    redisSSLContextError ssl_error;
+    redisSSLContextError ssl_error = REDIS_SSL_CTX_NONE;
     redisContext *c;
     redisReply *reply;
     if (argc < 4) {
@@ -27,9 +27,8 @@ int main(int argc, char **argv) {
     redisInitOpenSSL();
     ssl = redisCreateSSLContext(ca, NULL, cert, key, NULL, &ssl_error);
-    if (!ssl) {
-        printf("SSL Context error: %s\n",
-            redisSSLContextGetError(ssl_error));
+    if (!ssl || ssl_error != REDIS_SSL_CTX_NONE) {
+        printf("SSL Context error: %s\n", redisSSLContextGetError(ssl_error));
         exit(1);
     }