Stop depending on uninitialized variables when setting TCP_NODELAY

The contents of an uninitialized variable were used when setting the
TCP_NODELAY option for sockets created with open-client-stream and
open-server-stream, so the option would not be set when the value of
the variable happened to be 0 (which happened regularly on my
OpenBSD box).

Tested on OpenBSD and Linux.
commit 26e7802917
parent 0c5be44ad9
Author: Kris Katterjohn
Date:   2017-06-23 17:06:57 -05:00


@@ -133,11 +133,11 @@ int connect_to_server(char *host, int port)
 #ifdef TCP_NODELAY
 /* make sure to turn off TCP coalescence */
 #if defined(ECL_MS_WINDOWS_HOST)
-{ char mi;
+{ char mi = 1;
 setsockopt (fd, IPPROTO_TCP, TCP_NODELAY, &mi, sizeof (char));
 }
 #else
-{ int mi;
+{ int mi = 1;
 setsockopt (fd, IPPROTO_TCP, TCP_NODELAY, &mi, sizeof (int));
 }
 #endif
@@ -196,11 +196,11 @@ create_server_port(int port)
 #ifdef TCP_NODELAY
 /* make sure to turn off TCP coalescence */
 #if defined(ECL_MS_WINDOWS_HOST)
-{ char mi;
+{ char mi = 1;
 setsockopt(request, IPPROTO_TCP, TCP_NODELAY, &mi, sizeof (char));
 }
 #else
-{ int mi;
+{ int mi = 1;
 setsockopt(request, IPPROTO_TCP, TCP_NODELAY, &mi, sizeof (int));
 }
 #endif
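
For illustration, a minimal standalone sketch of the corrected pattern on a
POSIX system follows; it is not part of this commit, and the set_nodelay
helper name and error reporting are assumptions made for the example:

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <unistd.h>

/* Hypothetical helper (not in the patched file): enable TCP_NODELAY
 * with an explicitly initialized option value and report failures. */
static int set_nodelay(int fd)
{
#ifdef TCP_NODELAY
	int mi = 1; /* 1 disables coalescence; an uninitialized 0 would leave it on */
	if (setsockopt(fd, IPPROTO_TCP, TCP_NODELAY, &mi, sizeof(int)) < 0) {
		fprintf(stderr, "setsockopt(TCP_NODELAY): %s\n", strerror(errno));
		return -1;
	}
#endif
	return 0;
}

int main(void)
{
	int fd = socket(AF_INET, SOCK_STREAM, 0);
	if (fd < 0)
		return 1;
	int rc = set_nodelay(fd);
	close(fd);
	return rc == 0 ? 0 : 1;
}

Note that passing a 0 option value is a successful setsockopt call that
explicitly disables TCP_NODELAY, so no error is reported; checking the
return value cannot catch the original bug, and the fix has to be the
initialization itself.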