
Commit 69bee29

Merge branch 'master' into dev_add_highwatermark_to_batch
2 parents: 40aec5f + c37401c


70 files changed, +3873 -1717 lines

.semaphore/post_install.yml

Lines changed: 11 additions & 11 deletions
@@ -36,7 +36,7 @@ blocks:
     - env_var: DOCKER_IMAGE
       values: ["debian:bullseye", "debian:bookworm", "ubuntu:20.04", "ubuntu:22.04", "ubuntu:24.04"]
     - env_var: NODE_VERSION
-      values: ["18", "20", "21", "22", "23"]
+      values: ["18", "20", "21", "22", "23", "24"]
   commands:
     - docker run -v "$(pwd):/v" "$DOCKER_IMAGE" /v/ci/post-install/install-debian.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -55,7 +55,7 @@ blocks:
     - env_var: DOCKER_IMAGE
      values: ["debian:bullseye", "debian:bookworm", "ubuntu:20.04", "ubuntu:22.04", "ubuntu:24.04"]
    - env_var: NODE_VERSION
-     values: ["18", "20", "21", "22", "23"]
+     values: ["18", "20", "21", "22", "23", "24"]
  commands:
    - docker run -v "$(pwd):/v" "$DOCKER_IMAGE" /v/ci/post-install/install-debian.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -72,7 +72,7 @@ blocks:
   - name: "Install"
     matrix:
       - env_var: NODE_VERSION
-        values: ["18", "20", "21", "22", "23"]
+        values: ["18", "20", "21", "22", "23", "24"]
     commands:
       - docker run -v "$(pwd):/v" "node:${NODE_VERSION}-alpine" /v/ci/post-install/install-alpine.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -89,7 +89,7 @@ blocks:
   - name: "Install"
     matrix:
       - env_var: NODE_VERSION
-        values: ["18", "20", "21", "22", "23"]
+        values: ["18", "20", "21", "22", "23", "24"]
     commands:
       - docker run -v "$(pwd):/v" "node:${NODE_VERSION}-alpine" /v/ci/post-install/install-alpine.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -108,7 +108,7 @@ blocks:
     - env_var: DOCKER_IMAGE
       values: ["quay.io/centos/centos:stream9", "almalinux:9", "rockylinux:9"]
    - env_var: NODE_VERSION
-     values: ["18", "20", "21", "22", "23"]
+     values: ["18", "20", "21", "22", "23", "24"]
  commands:
    - docker run -v "$(pwd):/v" "$DOCKER_IMAGE" /v/ci/post-install/install-rhel.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -127,7 +127,7 @@ blocks:
     - env_var: DOCKER_IMAGE
       values: ["quay.io/centos/centos:stream9", "almalinux:9", "rockylinux:9"]
    - env_var: NODE_VERSION
-     values: ["18", "20", "21", "22", "23"]
+     values: ["18", "20", "21", "22", "23", "24"]
  commands:
    - docker run -v "$(pwd):/v" "$DOCKER_IMAGE" /v/ci/post-install/install-rhel.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -136,12 +136,12 @@ blocks:
   task:
     agent:
       machine:
-        type: s1-prod-macos-13-5-amd64
+        type: s1-macos-15-amd64-6
     jobs:
       - name: "Install"
         matrix:
           - env_var: NODE_VERSION
-            values: ["18", "20", "21", "22", "23"]
+            values: ["18", "20", "21", "22", "23", "24"]
         commands:
           - sem-version node $NODE_VERSION
           - bash ci/post-install/install-mac.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -151,12 +151,12 @@ blocks:
   task:
     agent:
       machine:
-        type: s1-prod-macos-13-5-arm64
+        type: s1-macos-15-arm64-8
     jobs:
       - name: "Install"
         matrix:
           - env_var: NODE_VERSION
-            values: ["18", "20", "21", "22", "23"]
+            values: ["18", "20", "21", "22", "23", "24"]
         commands:
           - sem-version node $NODE_VERSION
           - bash ci/post-install/install-mac.sh "$NODE_VERSION" "$LIBRARY_VERSION"

@@ -187,7 +187,7 @@ blocks:
   - name: "Install"
     matrix:
       - env_var: NODE_VERSION
-        values: ["18.19.0", "20.11.0", "21.6.1", "22.2.0", "23.2.0"]
+        values: ["18.19.0", "20.11.0", "21.6.1", "22.2.0", "23.2.0", "24.3.0"]
     commands:
       - Invoke-WebRequest "https://nodejs.org/download/release/v${env:NODE_VERSION}/node-v${env:NODE_VERSION}-x64.msi" -OutFile node.msi
       - msiexec /qn /l* node-log.txt /i node.msi

.semaphore/semaphore.yml

Lines changed: 26 additions & 9 deletions
@@ -28,7 +28,7 @@ global_job_config:
     - git submodule update --init --recursive
     - cd deps/librdkafka
     - git fetch origin
-    - git checkout v2.10.0
+    - git checkout v2.11.1
     - cd ../../
     - cache clear

@@ -79,7 +79,7 @@ blocks:
   task:
     agent:
       machine:
-        type: s1-prod-macos-13-5-arm64
+        type: s1-macos-15-arm64-8
     jobs:
       - name: 'Build from source and test'
         commands:

@@ -102,13 +102,21 @@ blocks:
       commands:
         - make test
         - artifact push workflow coverage/mocha/coverage-final.json --destination "mocha-coverage.json"
-    - name: "Promisified Tests"
+    - name: "Promisified Tests (Classic Protocol)"
       commands:
         - '[[ -z $DOCKERHUB_APIKEY ]] || docker login --username $DOCKERHUB_USER --password $DOCKERHUB_APIKEY'
-        - docker compose up -d && sleep 30
+        - docker compose -f test/docker/docker-compose.yml up -d && sleep 30
         - export NODE_OPTIONS='--max-old-space-size=1536'
         - npx jest --no-colors --ci test/promisified/
-        - artifact push workflow coverage/jest/coverage-final.json --destination "jest-coverage.json"
+        - artifact push workflow coverage/jest/coverage-final.json --destination "jest-classic-coverage.json"
+    - name: "Promisified Tests (Consumer Protocol)"
+      commands:
+        - '[[ -z $DOCKERHUB_APIKEY ]] || docker login --username $DOCKERHUB_USER --password $DOCKERHUB_APIKEY'
+        - docker compose -f test/docker/docker-compose-kraft.yml up -d && sleep 30
+        - export TEST_CONSUMER_GROUP_PROTOCOL=consumer
+        - export NODE_OPTIONS='--max-old-space-size=1536'
+        - npx jest --no-colors --ci test/promisified/
+        - artifact push workflow coverage/jest/coverage-final.json --destination "jest-consumer-coverage.json"
     - name: "Lint"
       commands:
         - make lint

@@ -166,7 +174,7 @@ blocks:
     - name: "Performance Test"
       commands:
         - '[[ -z $DOCKERHUB_APIKEY ]] || docker login --username $DOCKERHUB_USER --password $DOCKERHUB_APIKEY'
-        - docker compose up -d && sleep 30
+        - docker compose -f test/docker/docker-compose.yml up -d && sleep 30
         - export NODE_OPTIONS='--max-old-space-size=1536'
         - cd examples/performance
         - npm install

@@ -201,6 +209,7 @@ blocks:
         - 21,120
         - 22,127
         - 23,131
+        - 24,137
       commands:
         - export NODE_VERSION=$(echo $NODE_VERSION_ABI | cut -d, -f1)
         - export NODE_ABI=$(echo $NODE_VERSION_ABI | cut -d, -f2)

@@ -237,6 +246,7 @@ blocks:
         - 21,120
         - 22,127
         - 23,131
+        - 24,137
       commands:
         - export NODE_VERSION=$(echo $NODE_VERSION_ABI | cut -d, -f1)
         - export NODE_ABI=$(echo $NODE_VERSION_ABI | cut -d, -f2)

@@ -273,6 +283,7 @@ blocks:
         - 21,120
         - 22,127
         - 23,131
+        - 24,137
       commands:
         - export NODE_VERSION=$(echo $NODE_VERSION_ABI | cut -d, -f1)
         - export NODE_ABI=$(echo $NODE_VERSION_ABI | cut -d, -f2)

@@ -309,6 +320,7 @@ blocks:
         - 21,120
         - 22,127
         - 23,131
+        - 24,137
       commands:
         - export NODE_VERSION=$(echo $NODE_VERSION_ABI | cut -d, -f1)
         - export NODE_ABI=$(echo $NODE_VERSION_ABI | cut -d, -f2)

@@ -324,7 +336,7 @@ blocks:
   task:
     agent:
       machine:
-        type: s1-prod-macos-13-5-arm64
+        type: s1-macos-15-arm64-8
     env_vars:
       - name: ARCHITECTURE
         value: "arm64"

@@ -342,6 +354,7 @@ blocks:
         - 21,120
         - 22,127
         - 23,131
+        - 24,137
       commands:
         - export NODE_VERSION=$(echo $NODE_VERSION_ABI | cut -d, -f1)
         - export NODE_ABI=$(echo $NODE_VERSION_ABI | cut -d, -f2)

@@ -359,7 +372,7 @@ blocks:
   task:
     agent:
       machine:
-        type: s1-prod-macos-13-5-amd64
+        type: s1-macos-15-amd64-6
     env_vars:
       - name: ARCHITECTURE
         value: "x64"

@@ -377,6 +390,7 @@ blocks:
         - 21,120
         - 22,127
         - 23,131
+        - 24,137
       commands:
         - export NODE_VERSION=$(echo $NODE_VERSION_ABI | cut -d, -f1)
         - export NODE_ABI=$(echo $NODE_VERSION_ABI | cut -d, -f2)

@@ -431,6 +445,7 @@ blocks:
         - 21.6.1,120
         - 22.2.0,127
         - 23.2.0,131
+        - 24.3.0,137
       commands:
         - $env:NODE_VERSION = $env:NODE_VERSION_ABI.Split(',')[0]
         - $env:NODE_ABI = $env:NODE_VERSION_ABI.Split(',')[1]

@@ -441,6 +456,7 @@ blocks:
         - pip install setuptools
         - $env:ARTIFACT_KEY = "confluent-kafka-javascript-${env:SEMAPHORE_GIT_TAG_NAME}-node-v${env:NODE_ABI}-${env:PLATFORM}-${env:LIBC}-${env:ARCHITECTURE}.tar.gz"
         - echo "$env:ARTIFACT_KEY"
+        - npm config delete registry # we have no requirement for internal packages.
         - npm ci # node-pre-gyp will fallback to build here, because new tag implies no release yet.
         - npx node-pre-gyp package
         - ls "build/stage/${env:SEMAPHORE_GIT_TAG_NAME}/${env:ARTIFACT_KEY}"

@@ -479,7 +495,8 @@ after_pipeline:
       - checkout
       - sem-version java 11
       - artifact pull workflow mocha-coverage.json
-      - artifact pull workflow jest-coverage.json
+      - artifact pull workflow jest-classic-coverage.json
+      - artifact pull workflow jest-consumer-coverage.json
       - artifact pull workflow jest-sr-coverage.json
       - npx --yes istanbul-merge --out merged-output/merged-coverage.json *-coverage.json
       - npx nyc report -t merged-output --report-dir coverage --reporter=text --reporter=lcov

CHANGELOG.md

Lines changed: 56 additions & 1 deletion
@@ -1,10 +1,65 @@
+# confluent-kafka-javascript v1.5.0
+
+v1.5.0 is a feature release. It is supported for all usage.
+
+## Enhancements
+
+1. Adds support for `highWatermark`, `offsetLag()`, and `offsetLagLow()` in the `eachBatch` callback (#317).
+
+## Fixes
+
+1. Fix an issue where messages could be delayed by up to 5s after a pause and resume, or after a seek (#285, #363).
+
+
+# confluent-kafka-javascript v1.4.1
+
+v1.4.1 is a maintenance release. It is supported for all usage.
+
+## Enhancements
+
+1. References librdkafka v2.11.1. Refer to the [librdkafka v2.11.1 release notes](https://github.com/confluentinc/librdkafka/releases/tag/v2.11.1) for more information.
+
+
 # confluent-kafka-javascript v1.4.0
 
 v1.4.0 is a feature release. It is supported for all usage.
 
 ## Enhancements
 
-1. Adds support for `highWatermark`, `offsetLag()`, and `offsetLagLow()` in `eachBatch` callback (#317).
+1. References librdkafka v2.11.0. Refer to the [librdkafka v2.11.0 release notes](https://github.com/confluentinc/librdkafka/releases/tag/v2.11.0) for more information.
+2. [KIP-848] `describeGroups()` now supports KIP-848 introduced `consumer` groups. Two new fields for consumer group type and target assignment have also been added. Type defines whether this group is a `classic` or `consumer` group. Target assignment is only valid for the `consumer` protocol and defaults to undefined (#329).
+3. [KIP-848] The Admin API for listing consumer groups now has an optional filter to return only groups of the given types (#328).
+4. Add support for Node v24 pre-built binaries (@weyert, #307, #337).
+
+
+# confluent-kafka-javascript v1.3.2
+
+v1.3.2 is a maintenance release. It is supported for all usage.
+
+## Enhancements
+
+1. References librdkafka v2.10.1. Refer to the [librdkafka v2.10.1 release notes](https://github.com/confluentinc/librdkafka/releases/tag/v2.10.1) for more information.
+2. Support for schema id in header (#303).
+3. Add CEL support for Data Quality rules (#313).
+
+## Fixes
+
+1. Fix missing await during JSON deserialization (#301).
+2. Fix possible NPE in the CSFLE executor (#305).
+
+
+# confluent-kafka-javascript v1.3.1
+
+v1.3.1 is a maintenance release. It is supported for all usage.
+
+## Fixes
+
+1. Avoid a race condition that causes 100% usage of a CPU core when consuming with `partitionsConsumedConcurrently > 1` and all messages are consumed (#300).
+2. Fix type definition for `Kafka()` constructor and for passing topic configurations to the `producer()` and `consumer()` in the promisified API (@davidkhala, #297, #321).
 
 
 # confluent-kafka-javascript v1.3.0
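The v1.5.0 entry above adds lag accessors to the `eachBatch` payload. Below is a minimal, illustrative sketch of reading them from the promisified (KafkaJS-style) consumer; the broker address, topic, and group id are placeholders, and the batch field shapes follow the KafkaJS convention this API mirrors.

const { Kafka } = require('@confluentinc/kafka-javascript').KafkaJS;

async function run() {
  const kafka = new Kafka({ kafkaJS: { brokers: ['localhost:9092'] } }); // placeholder broker
  const consumer = kafka.consumer({ kafkaJS: { groupId: 'batch-lag-demo' } }); // placeholder group id

  await consumer.connect();
  await consumer.subscribe({ topic: 'test-topic' }); // placeholder topic

  await consumer.run({
    eachBatch: async ({ batch }) => {
      // highWatermark: the partition's high watermark offset reported by the broker.
      // offsetLag() / offsetLagLow(): how far the last / first message in this batch
      // lags behind that watermark (per the v1.5.0 changelog entry above).
      console.log(
        `partition ${batch.partition}: highWatermark=${batch.highWatermark}, ` +
        `offsetLag=${batch.offsetLag()}, offsetLagLow=${batch.offsetLagLow()}`
      );
      for (const message of batch.messages) {
        console.log(`  offset ${message.offset}: ${message.value.toString()}`);
      }
    },
  });
}

run().catch(console.error);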

MIGRATION.md

Lines changed: 1 addition & 0 deletions
@@ -222,6 +222,7 @@ producerRun().then(consumerRun).catch(console.error);
 | allowAutoTopicCreation | true | Determines if a topic should be created if it doesn't exist while consuming. |
 | **maxBytesPerPartition** | 1048576 (1MB) | determines how many bytes can be fetched in one request from a single partition. There is a change in semantics, this size grows dynamically if a single message larger than this is encountered, and the client does not get stuck. |
 | minBytes | 1 | Minimum number of bytes the broker responds with (or wait until `maxWaitTimeInMs`) |
+| **maxWaitTimeInMs** | 500 | Maximum time the broker may wait to fill the Fetch response with minBytes of messages. Its default value has been changed to match librdkafka's configuration. |
 | maxBytes | 10485760 (10MB) | Maximum number of bytes the broker responds with. |
 | **retry** | object | Identical to `retry` in the common configuration. This takes precedence over the common config retry. |
 | readUncommitted | false | If true, consumer will read transactional messages which have not been committed. |
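To make the new row concrete, here is a hedged sketch of where `maxWaitTimeInMs` sits in the promisified consumer configuration; the broker and group id are placeholders, and 500 simply restates the default from the table above.

const { Kafka } = require('@confluentinc/kafka-javascript').KafkaJS;

const kafka = new Kafka({ kafkaJS: { brokers: ['localhost:9092'] } }); // placeholder broker

// maxWaitTimeInMs caps how long the broker may wait to accumulate minBytes
// before answering a fetch; 500 ms is the default listed in the table above.
const consumer = kafka.consumer({
  kafkaJS: {
    groupId: 'migration-demo', // placeholder group id
    minBytes: 1,
    maxWaitTimeInMs: 500,
  },
});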

README.md

Lines changed: 10 additions & 6 deletions
@@ -33,15 +33,15 @@ The following configurations are supported:
 
 | Distribution | Supported Node Versions |
 | ----------------------------------------- | ----------------------- |
-| Debian Bullseye/Ubuntu 20.04 | 18, 20, 21, 22, 23 |
-| Debian Bookworm/Ubuntu 22.04 | 18, 20, 21, 22, 23 |
-| Alpine Linux 3.20+ | 18, 20, 21, 22, 23 |
-| AlmaLinux 9/Rocky Linux 9/CentOS Stream 9 | 18, 20, 21, 22, 23 |
+| Debian Bullseye/Ubuntu 20.04 | 18, 20, 21, 22, 23, 24 |
+| Debian Bookworm/Ubuntu 22.04 | 18, 20, 21, 22, 23, 24 |
+| Alpine Linux 3.20+ | 18, 20, 21, 22, 23, 24 |
+| AlmaLinux 9/Rocky Linux 9/CentOS Stream 9 | 18, 20, 21, 22, 23, 24 |
 
 Other distributions will probably work given a modern version of gcc/glibc, but we don't test the pre-built binaries with them.
 
-* macOS - arm64/m1. macOS (Intel) is supported on a best-effort basis. Node versions 18, 20, 21, 22, and 23 are supported.
-* Windows - x64. Node versions 18, 20, 21, 22, and 23 are supported.
+* macOS - arm64/m1. macOS (Intel) is supported on a best-effort basis. Node versions 18, 20, 21, 22, 23 and 24 are supported.
+* Windows - x64. Node versions 18, 20, 21, 22, 23 and 24 are supported.
 
 > [!WARNING]
 > Pre-built binary support will be dropped after the EOL of the node version or the OS.

@@ -113,6 +113,10 @@ For guidelines on contributing please see [CONTRIBUTING.md](CONTRIBUTING.md)
 | 1.0.0 | 2.6.1 |
 | 1.2.0 | 2.8.0 |
 | 1.3.0 | 2.10.0 |
+| 1.3.1 | 2.10.0 |
+| 1.3.2 | 2.10.1 |
+| 1.4.0 | 2.11.0 |
+| 1.4.1 | 2.11.1 |
 
 This mapping is applicable if you're using a pre-built binary. Otherwise, you can check the librdkafka version with the following command:
ci/checks/librdkafka-correct-version.js

Lines changed: 6 additions & 1 deletion
@@ -60,9 +60,14 @@ function versionAsString(version) {
 
 const librdkafkaVersion = parseLibrdkafkaVersion(defines.RD_KAFKA_VERSION);
 const versionString = versionAsString(librdkafkaVersion);
+let pjsLibrdkafka = pjs.librdkafka;
+const pjsIsRC = pjsLibrdkafka.match(/(.+)-RC[0-9]+$/);
+if (pjsIsRC) {
+  pjsLibrdkafka = pjsIsRC[1];
+}
 
 // If our version is a devel (early access) version, we might be on master.
-if (pjs.librdkafka !== versionString && !pjs.version.includes('devel')) {
+if (pjsLibrdkafka !== versionString && !pjs.version.includes('devel')) {
   console.error(`Librdkafka version of ${versionString} does not match package json: ${pjs.librdkafka}`);
   process.exit(1);
 }
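The added lines let the check pass when package.json pins a release-candidate version of librdkafka. A small standalone restatement of that regex logic (the helper name is illustrative, not part of the script):

// Strip a trailing "-RC<n>" suffix, mirroring the check added above.
function stripRcSuffix(version) {
  const match = version.match(/(.+)-RC[0-9]+$/);
  return match ? match[1] : version;
}

console.log(stripRcSuffix('2.11.1-RC1')); // "2.11.1" — now comparable to the compiled librdkafka version
console.log(stripRcSuffix('2.11.1'));     // "2.11.1" — unchanged when there is no RC suffix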

deps/librdkafka

Submodule librdkafka updated 125 files

deps/windows-install.py

Lines changed: 6 additions & 3 deletions
@@ -16,10 +16,13 @@
 depsIncludeDir = '../deps/include'
 buildReleaseDir = 'Release'
 
-# alternative: 'https://api.nuget.org/v3-flatcontainer/librdkafka.redist/{}/librdkafka.redist.{}.nupkg'.format(librdkafkaVersion, librdkafkaVersion)
+# Use publicly documented API to download librdkafka NuGet package.
+# https://api.nuget.org/v3-flatcontainer/{package}/{version}/{package}.{version}.nupkg
+# See https://learn.microsoft.com/en-us/nuget/api/package-base-address-resource#download-package-content-nupkg
 env_dist = os.environ
-downloadBaseUrl = env_dist['NODE_RDKAFKA_NUGET_BASE_URL'] if 'NODE_RDKAFKA_NUGET_BASE_URL' in env_dist else 'https://globalcdn.nuget.org/packages/'
-librdkafkaNugetUrl = downloadBaseUrl + 'librdkafka.redist.{}.nupkg'.format(librdkafkaVersion)
+downloadBaseUrl = env_dist['NODE_RDKAFKA_NUGET_BASE_URL'] if 'NODE_RDKAFKA_NUGET_BASE_URL' in env_dist else 'https://api.nuget.org/v3-flatcontainer/librdkafka.redist/{version}/'
+librdkafkaNugetUrl = downloadBaseUrl + 'librdkafka.redist.{version}.nupkg'
+librdkafkaNugetUrl = librdkafkaNugetUrl.format(version=librdkafkaVersion.lower())
 print('download librdkafka form ' + librdkafkaNugetUrl)
 outputDir = 'librdkafka.redist'
 outputFile = outputDir + '.zip'

examples/kafkajs/admin/describe-groups.js

Lines changed: 2 additions & 1 deletion
@@ -1,5 +1,5 @@
 // require('kafkajs') is replaced with require('@confluentinc/kafka-javascript').KafkaJS.
-const { Kafka, ConsumerGroupStates } = require('@confluentinc/kafka-javascript').KafkaJS;
+const { Kafka } = require('@confluentinc/kafka-javascript').KafkaJS;
 const { parseArgs } = require('node:util');
 
 function printNode(node, prefix = '') {

@@ -72,6 +72,7 @@ async function adminStart() {
     console.log(`\tProtocol type: ${group.protocolType}`);
     console.log(`\tPartition assignor: ${group.partitionAssignor}`);
     console.log(`\tState: ${group.state}`);
+    console.log(`\tType: ${group.type}`);
     console.log(`\tCoordinator: ${group.coordinator ? group.coordinator.id : group.coordinator}`);
     printNode(group.coordinator, '\t');
     console.log(`\tAuthorized operations: ${group.authorizedOperations}`);
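The updated example prints the new `type` field introduced for KIP-848 group descriptions. A hedged sketch of the same idea outside the example script, assuming the KafkaJS-style admin client shape used above (broker address and group id are placeholders):

const { Kafka } = require('@confluentinc/kafka-javascript').KafkaJS;

async function showGroupTypes(groupIds) {
  const kafka = new Kafka({ kafkaJS: { brokers: ['localhost:9092'] } }); // placeholder broker
  const admin = kafka.admin();
  await admin.connect();
  try {
    const { groups } = await admin.describeGroups(groupIds);
    for (const group of groups) {
      // `type` reports whether the group uses the classic protocol or the
      // KIP-848 `consumer` protocol, as printed by the updated example.
      console.log(`${group.groupId}: state=${group.state}, type=${group.type}`);
    }
  } finally {
    await admin.disconnect();
  }
}

showGroupTypes(['my-group']).catch(console.error); // placeholder group id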
