我正在使用 node-rdkafka（https://github.com/Blizzard/node-rdkafka）来消费和生成来自 IBM Bluemix Message Hub 的消息。只要每个 node 进程最多订阅 2 个主题，我就能正常地消费和生成消息。一旦订阅三个或更多主题，我的消费者就收不到任何已订阅主题上的消息了。我没有看到任何错误。
这里有软限制吗？还是我的代码中存在导致此问题的内容？增加服务器内存似乎没有任何影响。
生产者代码：
// Send a single message on `topic`; `type` becomes the message key and
// `data` is JSON-serialized into the message payload.
events.send = events.produce = (topic, type, data) => {
  log.info('Sending message on topic ' + topic);
  const producer = lib.getProducer(hubConfig);
  // Connect to the broker manually. On failure the error is logged and
  // the 'ready' event simply never fires.
  producer.connect({}, (err) => {
    if (err) {
      log.error('Producer failed to connect');
      log.error(err);
    }
  });
  // Wait for the ready event before producing.
  producer.on('ready', () => {
    log.info('Producer ready, sending message');
    // 'dr_cb' is enabled in getProducer, so the client must poll
    // regularly to serve delivery reports — otherwise librdkafka's
    // internal queue fills up and the client eventually stalls.
    producer.setPollInterval(100);
    try {
      producer.produce(
        topic,
        null, // partition: let librdkafka choose
        Buffer.from(JSON.stringify(data)), // new Buffer() is deprecated/unsafe
        type, // message key
        Date.now() // timestamp
      );
    } catch (err) {
      log.error('A problem occurred when sending our message');
      log.error(err);
    }
  });
  producer.on('event.error', (err) => {
    log.error('Error from producer');
    log.error(err);
  });
};
// Build a SASL_SSL-authenticated producer for the brokers listed in
// `hubConfig`. Delivery reports (dr_cb) and event callbacks are enabled.
lib.getProducer = (hubConfig) => {
  const brokerList = hubConfig.kafka_brokers_sasl.join(',');
  const producerConfig = {
    'metadata.broker.list': brokerList,
    'security.protocol': 'sasl_ssl',
    'ssl.ca.location': '/etc/ssl/certs',
    'sasl.mechanisms': 'PLAIN',
    'sasl.username': hubConfig.user,
    'sasl.password': hubConfig.password,
    'api.version.request': true,
    'dr_cb': true,
    'event_cb': true,
  };
  return new Kafka.Producer(producerConfig);
};
消费者代码：
// Subscribe to one or more topics and return an EventEmitter that fires
// 'message' for each (de-duplicated) record and 'error' on consumer errors.
// `callback` (optional) is invoked once on ready, or with the connect error.
events.listen = events.consume = (topics, callback) => {
  // Accept a single topic string as well as an array of topics.
  if (!_.isArray(topics)) {
    topics = [topics];
  }
  log.info('Subscribing to ' + topics.join(', ') + ' on test event listener...');
  let consumer,
    emitter = new evt.EventEmitter(),
    // Each consumer has a unique group and client ID, so every listener
    // gets its own consumer group and receives all messages (no
    // load-balancing between listeners).
    groupName = 'group-' + uuidv1(),
    clientName = 'client-' + uuidv1();
  consumer = lib.getConsumer(hubConfig, groupName, clientName);
  consumer.connect({}, (err) => {
    if (err) {
      log.error('Consumer failed to connect');
      log.error(err);
      if (callback) callback(err);
    }
  });
  consumer
    .on('ready', function() {
      log.info('Consumer connected, subscribed to ' + topics.join(', '));
      consumer.subscribe(topics);
      consumer.consume();
      if (callback) callback();
    })
    .on('data', function(data) {
      // Strip the quotes JSON.stringify puts around plain strings.
      let d = data.value.toString().replace(/"/g, ''),
        dupeKey = d + '-' + data.key;
      if (!duplicateBuffer[dupeKey]) {
        emitter.emit('message', {
          data: d,
          type: data.key,
          topic: data.topic
        });
        // Suppress identical payload/key pairs for DUPE_DELAY ms.
        duplicateBuffer[dupeKey] = setTimeout(() => {
          delete duplicateBuffer[dupeKey];
        }, DUPE_DELAY);
      } else {
        // BUG FIX: was `data.type`, which is always undefined on a
        // node-rdkafka message — the event "type" lives in the key
        // (see the 'message' emit above).
        log.info('Ignoring duplicate event: ' + d + ' ' + data.key);
      }
    })
    .on('error', (err) => {
      log.error(err);
      emitter.emit('error', err);
    });
  return emitter;
};
// Build a SASL_SSL-authenticated consumer; the caller supplies unique
// group and client IDs so each listener forms its own consumer group.
lib.getConsumer = (hubConfig, groupName, clientName) => {
  const settings = {
    'group.id': groupName,
    'client.id': clientName,
    'metadata.broker.list': hubConfig.kafka_brokers_sasl.join(','),
    'security.protocol': 'sasl_ssl',
    'ssl.ca.location': '/etc/ssl/certs',
    'sasl.mechanisms': 'PLAIN',
    'sasl.username': hubConfig.user,
    'sasl.password': hubConfig.password,
    'api.version.request': true,
    'event_cb': true,
  };
  // Second argument is the default per-topic config (none needed here).
  return new Kafka.KafkaConsumer(settings, {});
};
有什么建议?
1 回答
您可以使用node-rdkafka使用者订阅的主题数量没有软限制 .
为了验证,我调整了我们的node-rdkafka示例(https://github.com/ibm-messaging/message-hub-samples/tree/master/kafka-nodejs-console-sample)以使用3个主题,它按预期工作正常 .
另外，我在你的生产者代码中看不到的一点是对 poll 的调用（例如 producer.setPollInterval(100)）——启用了 dr_cb 后需要定期轮询以处理传递报告。
另外，为了帮助调查，我建议在您的客户端配置中设置 'debug': 'all'，以便查看详细的客户端日志。