0

I'm using node-rdkafka (https://github.com/Blizzard/node-rdkafka) to consume and produce messages from an IBM Bluemix Message Hub. I'm able to consume and produce messages without issue, as long as I only subscribe to a max of 2 topics per node process. As soon as I subscribe to three or more topics, my consumer no longer receives any messages on any of the subscribed topics. I don't see any errors.

Is there a soft limit here? Or is there something in my code causing this issue? Bumping up server memory doesn't seem to have any effect.

Producer code:

/**
 * Publish a single message to the given Kafka topic.
 *
 * @param {string} topic - Topic to produce to.
 * @param {string} type  - Message type; sent as the Kafka message key.
 * @param {*}      data  - Payload; JSON-serialized into the message value.
 *
 * NOTE(review): a fresh producer is created and connected on every call and
 * never disconnected — presumably acceptable for this app, but worth
 * confirming it doesn't leak connections under load.
 */
events.send = events.produce = (topic, type, data) => {
  log.info('Sending message on topic ' + topic);

  let producer = lib.getProducer(hubConfig);

  // Poll periodically so delivery reports (dr_cb) are actually dispatched;
  // without this, node-rdkafka never surfaces delivery callbacks.
  producer.setPollInterval(100);

  // Connect to the broker manually
  producer.connect({}, (err) => {
    if (err) {
      log.error('Producer failed to connect');
      log.error(err);
    }
  });

  // Wait for the ready event before proceeding
  producer.on('ready', () => {
    log.info('Producer ready, sending message');
    try {
      producer.produce(
        topic,
        null, // partition: let librdkafka pick
        // Buffer.from replaces the deprecated/unsafe `new Buffer(...)`.
        Buffer.from(JSON.stringify(data)),
        type, // message key
        Date.now() // timestamp
      );
    } catch (err) {
      log.error('A problem occurred when sending our message');
      log.error(err);
    }
  });

  producer.on('event.error', (err) => {
    log.error('Error from producer');
    log.error(err);
  });
};

/**
 * Build a SASL/SSL-authenticated node-rdkafka Producer for the given
 * Message Hub credentials. Delivery reports and event callbacks enabled.
 *
 * @param {Object} hubConfig - Credentials: kafka_brokers_sasl (array),
 *                             user, password.
 * @returns {Kafka.Producer} an unconnected producer instance.
 */
lib.getProducer = (hubConfig) => {
  const producerConfig = {
    'metadata.broker.list': hubConfig.kafka_brokers_sasl.join(','),
    'security.protocol': 'sasl_ssl',
    'ssl.ca.location': '/etc/ssl/certs',
    'sasl.mechanisms': 'PLAIN',
    'sasl.username': hubConfig.user,
    'sasl.password': hubConfig.password,
    'api.version.request': true,
    'dr_cb': true,
    'event_cb': true
  };
  return new Kafka.Producer(producerConfig);
};

Consumer:

/**
 * Subscribe to one or more Kafka topics and re-emit incoming messages
 * on a local EventEmitter.
 *
 * @param {string|string[]} topics   - Topic name or list of topic names.
 * @param {Function}        [callback] - Invoked with (err) on connect
 *                                       failure, or with no args once
 *                                       subscribed.
 * @returns {EventEmitter} emits 'message' ({data, type, topic}) and 'error'.
 *
 * NOTE(review): messages seen within DUPE_DELAY with the same value+key are
 * dropped as duplicates via the module-level duplicateBuffer map.
 */
events.listen = events.consume = (topics, callback) => {
    if (!_.isArray(topics)) {
      topics = [topics];
    }
    log.info('Subscribing to ' + topics.join(', ') + ' on test event listener...');
    let consumer,
      emitter = new evt.EventEmitter(),

      // Each consumer has a unique group and client ID
      groupName = 'group-' + uuidv1(),
      clientName = 'client-' + uuidv1();

    consumer = lib.getConsumer(hubConfig, groupName, clientName);

    consumer.connect({}, (err) => {
      if (err) {
        log.error('Consumer failed to connect');
        log.error(err);
        if (callback) callback(err);
      }
    });
    consumer
      .on('ready', function() {
        log.info('Consumer connected, subscribed to ' + topics.join(', '));
        consumer.subscribe(topics);
        consumer.consume();
        if (callback) callback();
      })
      .on('data', function(data) {
        // Strip surrounding JSON quotes from the string payload.
        let d = data.value.toString().replace(/"/g,''),
          dupeKey = d + '-' + data.key;
        if (!duplicateBuffer[dupeKey]) {
          emitter.emit('message', {
            data: d,
            type: data.key,
            topic: data.topic
          });

          // Remember this message briefly so repeats are suppressed.
          duplicateBuffer[dupeKey] = setTimeout(() => {
            delete duplicateBuffer[dupeKey];
          }, DUPE_DELAY);
        } else {
          // Fix: the message type lives in data.key (data.type is undefined
          // on a node-rdkafka message object — see the emit above).
          log.info('Ignoring duplicate event: ' + d + ' ' + data.key);
        }
      })
      .on('error', (err) => {
        log.error(err);
        emitter.emit('error', err);
      });

    return emitter;
  };

/**
 * Build a SASL/SSL-authenticated node-rdkafka KafkaConsumer with the
 * supplied group and client identifiers.
 *
 * @param {Object} hubConfig  - Credentials: kafka_brokers_sasl (array),
 *                              user, password.
 * @param {string} groupName  - Consumer group id.
 * @param {string} clientName - Client id.
 * @returns {Kafka.KafkaConsumer} an unconnected consumer instance.
 */
lib.getConsumer = (hubConfig, groupName, clientName) => {
  const consumerConfig = {
    'group.id': groupName,
    'client.id': clientName,
    'metadata.broker.list': hubConfig.kafka_brokers_sasl.join(','),
    'security.protocol': 'sasl_ssl',
    'ssl.ca.location': '/etc/ssl/certs',
    'sasl.mechanisms': 'PLAIN',
    'sasl.username': hubConfig.user,
    'sasl.password': hubConfig.password,
    'api.version.request': true,
    'event_cb': true
  };
  // Second argument is the (empty) default topic configuration.
  return new Kafka.KafkaConsumer(consumerConfig, {});
};

Any suggestions?

Mickael Maison
  • 18,458
  • 7
  • 48
  • 49
alreit
  • 177
  • 1
  • 9

1 Answer

2

There is no soft limit about the number of topics you can subscribe to with node-rdkafka Consumers.

Just to verify, I tweaked our node-rdkafka sample (https://github.com/ibm-messaging/message-hub-samples/tree/master/kafka-nodejs-console-sample) to use 3 topics and it worked fine as expected.

One thing I don't see in your producer code is a call to

  producer.setPollInterval(100);

Also to help investigate, I'd suggest to set:

 'debug': 'all'

in both your clients configuration.

Mickael Maison
  • 18,458
  • 7
  • 48
  • 49
  • With the debug messages on I was able to find that one of the topics I was trying to consume wasn't created on the message hub. With that rectified all seems well! – alreit Nov 28 '17 at 15:49