I'm writing a small C client/server application, but I cannot make the connection work when using the external IP address. The code for both client and server is taken from here, in particular the clients do:
char *default_server_name = "localhost";
char *server_name = NULL;
int nport = DEFAULT_DAMA_PORT;
char port[15];
// Parse the command line options
if (parse_options(argc, argv, &server_name, &nport) < 0) {
return -1;
}
if (server_name == NULL) {
server_name = default_server_name;
}
snprintf(port, 15, "%d", nport);
// Connect to the server: resolve server_name:port into a list of candidate
// addresses (IPv4 and/or IPv6) and try each one until a connect() succeeds.
int client_socket;
struct addrinfo hints, *servinfo, *p;
int rv;
memset(&hints, 0, sizeof(hints));
hints.ai_family = AF_UNSPEC;     // accept both IPv4 and IPv6 results
hints.ai_socktype = SOCK_STREAM; // TCP
if ((rv = getaddrinfo(server_name, port, &hints, &servinfo)) != 0) {
    fprintf(stderr, "getaddrinfo: %s\n", gai_strerror(rv));
    exit(1);
}
for (p = servinfo; p != NULL; p = p->ai_next) {
    if ((client_socket = socket(p->ai_family, p->ai_socktype, p->ai_protocol)) == -1) {
#ifdef DEBUG
        perror("socket");
#endif
        continue;
    }
    if (connect(client_socket, p->ai_addr, p->ai_addrlen) == -1) {
        close(client_socket);
#ifdef DEBUG
        perror("connect");
#endif
        continue;
    }
    // Connected successfully!
    break;
}
if (p == NULL) {
    // The loop wasn't able to connect to the server.
    // BUG FIX: the period was misplaced after the newline ("\n.").
    fprintf(stderr, "Couldn't connect to the server.\n");
    exit(1);
}
// BUG FIX: release the address list returned by getaddrinfo (was leaked).
freeaddrinfo(servinfo);
While the server:
int nport;
char port[15];
if (parse_options(argc, argv, &nport) < 0) {
return -1;
}
snprintf(port, 15, "%d", nport);
// Create a passive (listening) socket bound to the chosen port on the
// wildcard address, trying each address family getaddrinfo() offers.
int server_socket;
struct addrinfo hints, *servinfo, *p;
int rv;
int yes = 1;
memset(&hints, 0, sizeof(hints));
hints.ai_family = AF_UNSPEC;     // IPv4 or IPv6
hints.ai_socktype = SOCK_STREAM; // TCP
hints.ai_flags = AI_PASSIVE;     // NULL node -> wildcard (INADDR_ANY / in6addr_any)
if ((rv = getaddrinfo(NULL, port, &hints, &servinfo)) != 0) {
    fprintf(stderr, "getaddrinfo: %s\n", gai_strerror(rv));
    exit(1);
}
for (p = servinfo; p != NULL; p = p->ai_next) {
    if ((server_socket = socket(p->ai_family, p->ai_socktype, p->ai_protocol)) == -1) {
#ifdef DEBUG
        perror("socket");
#endif
        continue;
    }
    // BUG FIX: without SO_REUSEADDR, restarting the server shortly after a
    // run leaves the port in TIME_WAIT and bind() fails with EADDRINUSE —
    // a common cause of "cannot bind / cannot connect" during testing.
    if (setsockopt(server_socket, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof yes) == -1) {
        close(server_socket);
#ifdef DEBUG
        perror("setsockopt");
#endif
        continue;
    }
    if (bind(server_socket, p->ai_addr, p->ai_addrlen) == -1) {
        close(server_socket);
#ifdef DEBUG
        perror("bind");
#endif
        continue;
    }
    // We bound successfully!
    break;
}
if (p == NULL) {
    fprintf(stderr, "failed to bind socket\n");
    exit(2);
}
// BUG FIX: release the address list returned by getaddrinfo (was leaked).
freeaddrinfo(servinfo);
// Accept exactly two client connections (the two players).
int pl_one, pl_two;
socklen_t pl_one_len, pl_two_len;
// BUG FIX: use sockaddr_storage rather than sockaddr_in — with AF_UNSPEC
// the listening socket may be IPv6, and a sockaddr_in is too small to hold
// the peer's address (accept() would silently truncate it).
struct sockaddr_storage pl_one_addr, pl_two_addr;
if (listen(server_socket, 2) < 0) {
    fatal_error("Error in syscall listen.", 2);
}
// Get the two clients' connections.
pl_one_len = sizeof(pl_one_addr);
pl_one = accept(server_socket,
                (struct sockaddr *)&pl_one_addr,
                &pl_one_len);
if (pl_one < 0) {
    fatal_error("Error in syscall accept.", 3);
}
pl_two_len = sizeof(pl_two_addr);
pl_two = accept(server_socket,
                (struct sockaddr *)&pl_two_addr,
                &pl_two_len);
if (pl_two < 0) {
    fatal_error("Error in syscall accept.", 3);
}
If I specify the IP of my machine in the command line, and hence the server_name
in the clients is set to a string like 151.51.xxx.xxx
, then the sockets cannot connect to the server. Also using 127.0.0.1
shows the same behaviour, which makes me think that when the documentation states:
The host name that you're interested in goes in the nodename parameter. The address can be either a host name, like "www.example.com", or an IPv4 or IPv6 address (passed as a string).
it's just joking.
Am I doing something incorrectly? Could there be some problem with a firewall etc. that is preventing the clients from connecting using the IP addresses?
Note: I've already searched a lot for this problem and some people say to avoid using getaddrinfo
at all and directly fill with INADDR_ANY
, but the getaddrinfo
documentation states that passing NULL
as nodename
should already fill the address with INADDR_ANY
, so I don't see why should I use the old method when the new one does this automatically.