Skip to content

Commit 9b556d2

Browse files
committed
Changed property name to js.consumer.max.batch.size
1 parent 2432d04 commit 9b556d2

File tree

9 files changed

+14
-14
lines changed

9 files changed

+14
-14
lines changed

CHANGELOG.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ v1.6.1 is a maintenance release. It is supported for all usage.
44

55
### Enhancements
66

7-
1. Configurable batch size through the `js.max.batch.size` property (#389).
7+
1. Configurable batch size through the `js.consumer.max.batch.size` property (#389).
88

99

1010
# confluent-kafka-javascript 1.6.0

MIGRATION.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -304,7 +304,7 @@ producerRun().then(consumerRun).catch(console.error);
304304
Heartbeats are automatically managed by librdkafka.
305305
- The `partitionsConsumedConcurrently` is supported by both `eachMessage` and `eachBatch`.
306306
- An API compatible version of `eachBatch` is available, maximum batch size
307-
can be configured through the `js.max.batch.size` configuration property
307+
can be configured through the `js.consumer.max.batch.size` configuration property
308308
and defaults to 32.
309309
The property `eachBatchAutoResolve` is supported.
310310
Within the `eachBatch` callback, use of `uncommittedOffsets` is unsupported,

examples/performance/performance-primitives.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -151,7 +151,7 @@ function newCompatibleConsumer(parameters, eachBatch) {
151151
{ 'enable.auto.commit': false };
152152
const jsOpts = {};
153153
if (eachBatch && CONSUMER_MAX_BATCH_SIZE !== null) {
154-
jsOpts['js.max.batch.size'] = CONSUMER_MAX_BATCH_SIZE;
154+
jsOpts['js.consumer.max.batch.size'] = CONSUMER_MAX_BATCH_SIZE;
155155
}
156156

157157
let groupId = eachBatch ? process.env.GROUPID_BATCH : process.env.GROUPID_MESSAGE;

lib/kafkajs/_consumer.js

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -701,19 +701,19 @@ class Consumer {
701701
this.#cacheExpirationTimeoutMs = this.#maxPollIntervalMs;
702702
rdKafkaConfig['max.poll.interval.ms'] = this.#maxPollIntervalMs * 2;
703703

704-
if (rdKafkaConfig['js.max.batch.size'] !== undefined) {
705-
const maxBatchSize = +rdKafkaConfig['js.max.batch.size'];
704+
if (rdKafkaConfig['js.consumer.max.batch.size'] !== undefined) {
705+
const maxBatchSize = +rdKafkaConfig['js.consumer.max.batch.size'];
706706
if (!Number.isInteger(maxBatchSize) || (maxBatchSize <= 0 && maxBatchSize !== -1)) {
707707
throw new error.KafkaJSError(
708-
"'js.max.batch.size' must be a positive integer or -1 for unlimited batch size.",
708+
"'js.consumer.max.batch.size' must be a positive integer or -1 for unlimited batch size.",
709709
{ code: error.ErrorCodes.ERR__INVALID_ARG });
710710
}
711711
this.#maxBatchSize = maxBatchSize;
712712
this.#maxBatchesSize = maxBatchSize;
713713
if (maxBatchSize === -1) {
714714
this.#messageCacheMaxSize = Number.MAX_SAFE_INTEGER;
715715
}
716-
delete rdKafkaConfig['js.max.batch.size'];
716+
delete rdKafkaConfig['js.consumer.max.batch.size'];
717717
}
718718

719719
return rdKafkaConfig;

lib/util.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,4 +52,4 @@ util.dictToStringList = function (mapOrObject) {
5252
return list;
5353
};
5454

55-
util.bindingVersion = '1.6.1-alpha.0';
55+
util.bindingVersion = '1.6.1-alpha.1';

package-lock.json

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@confluentinc/kafka-javascript",
3-
"version": "1.6.1-alpha.0",
3+
"version": "1.6.1-alpha.1",
44
"description": "Node.js bindings for librdkafka",
55
"librdkafka": "2.12.0",
66
"librdkafka_win": "2.12.0",

schemaregistry/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "@confluentinc/schemaregistry",
3-
"version": "1.6.1-alpha.0",
3+
"version": "1.6.1-alpha.1",
44
"description": "Node.js client for Confluent Schema Registry",
55
"main": "dist/index.js",
66
"types": "dist/index.d.ts",

types/kafkajs.d.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -251,7 +251,7 @@ export interface JSConsumerConfig {
251251
*
252252
* @default 32
253253
*/
254-
'js.max.batch.size'?: string | number
254+
'js.consumer.max.batch.size'?: string | number
255255
}
256256

257257
export type ConsumerGlobalAndTopicConfig = ConsumerGlobalConfig & ConsumerTopicConfig & JSConsumerConfig;

0 commit comments

Comments (0)