diff --git a/packages/grpc-js/.gitignore b/packages/grpc-js/.gitignore
new file mode 100644
index 000000000..ca235a431
--- /dev/null
+++ b/packages/grpc-js/.gitignore
@@ -0,0 +1 @@
+.clinic
diff --git a/packages/grpc-js/benchmarks/README.md b/packages/grpc-js/benchmarks/README.md
new file mode 100644
index 000000000..49aa16b2e
--- /dev/null
+++ b/packages/grpc-js/benchmarks/README.md
@@ -0,0 +1,88 @@
+This folder contains basic benchmarks to approximate performance impact of changes
+
+
+## How to test
+
+1. pnpm build; pnpm ts-node --transpile-only ./benchmarks/server.ts
+2. pnpm tsc -p tsconfig.modern.json; pnpm ts-node --transpile-only ./benchmarks/server.ts
+3. ideally run with jemalloc or memory fragmentation makes everything run slower over time
+
+For mac os:
+`DYLD_INSERT_LIBRARIES=$(brew --prefix jemalloc)/lib/libjemalloc.dylib pnpm ts-node --transpile-only ./benchmarks/server.ts`
+
+`DYLD_INSERT_LIBRARIES=$(brew --prefix jemalloc)/lib/libjemalloc.dylib NODE_ENV=production clinic flame -- node -r ts-node/register/transpile-only ./benchmarks/server.ts`
+
+`DYLD_INSERT_LIBRARIES=$(brew --prefix jemalloc)/lib/libjemalloc.dylib NODE_ENV=production node -r ts-node/register/transpile-only --trace-opt --trace-deopt ./benchmarks/server.ts`
+
+4. h2load -n200000 -m 50 http://localhost:9999/EchoService/Echo -c10 -t 10 -H 'content-type: application/grpc' -d ./echo-unary.bin
+
+Baseline on M1 Max Laptop:
+
+```
+ES2017 & ESNext targets are within margin of error:
+
+finished in 4.09s, 48851.86 req/s, 2.47MB/s
+requests: 200000 total, 200000 started, 200000 done, 200000 succeeded, 0 failed, 0 errored, 0 timeout
+status codes: 200000 2xx, 0 3xx, 0 4xx, 0 5xx
+traffic: 10.11MB (10603710) total, 978.38KB (1001860) headers (space savings 96.40%), 3.62MB (3800000) data
+                     min         max         mean         sd        +/- sd
+time for request:     2.47ms    104.19ms     10.18ms      3.46ms    94.40%
+time for connect:      790us      1.13ms       879us        98us    90.00%
+time to 1st byte:    12.09ms     97.17ms     52.68ms     28.04ms    60.00%
+req/s           :    4885.61     4922.01     4901.67       14.07    50.00%
+
+
+```
+
+---
+
+Changes to stream decoder:
+
+1. switch -> if
+
+```
+h2load -n200000 -m 50 http://localhost:9999/EchoService/Echo -c10 -t 10 -H 'content-type: application/grpc' -d ./echo-unary.bin
+
+finished in 3.82s, 52410.67 req/s, 2.65MB/s
+requests: 200000 total, 200000 started, 200000 done, 200000 succeeded, 0 failed, 0 errored, 0 timeout
+status codes: 200000 2xx, 0 3xx, 0 4xx, 0 5xx
+traffic: 10.11MB (10603690) total, 978.36KB (1001840) headers (space savings 96.40%), 3.62MB (3800000) data
+                     min         max         mean         sd        +/- sd
+time for request:     1.87ms     47.64ms      9.49ms      1.89ms    97.25%
+time for connect:     1.75ms      3.14ms      2.43ms       410us    70.00%
+time to 1st byte:     6.58ms     45.08ms     23.01ms     13.70ms    60.00%
+req/s           :    5242.32     5270.74     5253.00        9.37    70.00%
+```
+
+2. const enum is comparable to enum
+
+3. fewer Buffer.concat calls, unsafeAlloc
+
+
+```
+finished in 3.40s, 58763.66 req/s, 987.33KB/s
+requests: 200000 total, 200000 started, 200000 done, 200000 succeeded, 0 failed, 0 errored, 0 timeout
+status codes: 200000 2xx, 0 3xx, 0 4xx, 0 5xx
+traffic: 3.28MB (3441011) total, 1.01MB (1063183) headers (space savings 97.04%), 176.74KB (180986) data
+                     min         max         mean         sd        +/- sd
+time for request:      304us     41.57ms      3.28ms      1.63ms    80.98%
+time for connect:      831us      1.47ms      1.14ms       181us    70.00%
+time to 1st byte:     2.64ms     25.10ms     11.42ms      7.87ms    60.00%
+req/s           :    5877.32     6303.71     6082.75      168.23    50.00%
+```
+
+
+```
+old decoder:
+
+finished in 3.83s, 52210.19 req/s, 2.64MB/s
+requests: 200000 total, 200000 started, 200000 done, 200000 succeeded, 0 failed, 0 errored, 0 timeout
+status codes: 200000 2xx, 0 3xx, 0 4xx, 0 5xx
+traffic: 10.11MB (10603670) total, 978.34KB (1001820) headers (space savings 96.40%), 3.62MB (3800000) data
+                     min         max         mean         sd        +/- sd
+time for request:     1.16ms     18.75ms      3.82ms      1.45ms    88.89%
+time for connect:      723us      1.38ms      1.18ms       191us    80.00%
+time to 1st byte:     3.45ms     17.72ms      9.00ms      4.95ms    70.00%
+req/s           :    5221.65     5235.13     5225.05        4.23    90.00%
+```
+
diff --git a/packages/grpc-js/benchmarks/bench/metadata.js b/packages/grpc-js/benchmarks/bench/metadata.js
new file mode 100644
index 000000000..a9a0991fa
--- /dev/null
+++ b/packages/grpc-js/benchmarks/bench/metadata.js
@@ -0,0 +1,75 @@
+const { benchmark, createBenchmarkSuite } = require('../common');
+const {
+  sensitiveHeaders,
+  constants: {
+    HTTP2_HEADER_ACCEPT_ENCODING,
+    HTTP2_HEADER_TE,
+    HTTP2_HEADER_CONTENT_TYPE,
+  },
+} = require('node:http2');
+const {
+  Metadata: MetadataOriginal,
+} = require('@grpc/grpc-js/build/src/metadata');
+const { Metadata } = require('../../build/src/metadata');
+
+const GRPC_ACCEPT_ENCODING_HEADER = 'grpc-accept-encoding';
+const GRPC_ENCODING_HEADER = 'grpc-encoding';
+const GRPC_TIMEOUT_HEADER = 'grpc-timeout';
+const headers = Object.setPrototypeOf(
+  {
+    ':path': '/EchoService/Echo',
+    ':scheme': 'http',
+    ':authority': 'localhost:9999',
+    ':method': 'POST',
+    'user-agent': 'h2load nghttp2/1.58.0',
+    'content-type': 'application/grpc',
+    'content-length': '19',
+    [GRPC_ACCEPT_ENCODING_HEADER]: 'identity,deflate,gzip',
+    [GRPC_ENCODING_HEADER]: 'identity',
+    [sensitiveHeaders]: [],
+  },
+  null
+);
+
+const ogMeta = MetadataOriginal.fromHttp2Headers(headers);
+const currentMeta = Metadata.fromHttp2Headers(headers);
+
+const removeHeaders = metadata => {
+  metadata.remove(GRPC_TIMEOUT_HEADER);
+  metadata.remove(GRPC_ENCODING_HEADER);
+  metadata.remove(GRPC_ACCEPT_ENCODING_HEADER);
+  metadata.remove(HTTP2_HEADER_ACCEPT_ENCODING);
+  metadata.remove(HTTP2_HEADER_TE);
+  metadata.remove(HTTP2_HEADER_CONTENT_TYPE);
+};
+
+removeHeaders(ogMeta);
+removeHeaders(currentMeta);
+
+createBenchmarkSuite('fromHttp2Headers')
+  .add('1.10.6', function () {
+    MetadataOriginal.fromHttp2Headers(headers);
+  })
+  .add('current', function () {
+    Metadata.fromHttp2Headers(headers);
+  });
+
+createBenchmarkSuite('fromHttp2Headers + common operations')
+  .add('1.10.6', () => {
+    const metadata = MetadataOriginal.fromHttp2Headers(headers);
+    removeHeaders(metadata);
+  })
+  .add('current', () => {
+    const metadata = Metadata.fromHttp2Headers(headers);
+    removeHeaders(metadata);
+  });
+
+createBenchmarkSuite('toHttp2Headers')
+  .add('1.10.6', function () {
+    return ogMeta.toHttp2Headers();
+  })
+  .add('current', function () {
+    return currentMeta.toHttp2Headers();
+  });
+
+benchmark.run();
diff --git a/packages/grpc-js/benchmarks/bench/stream-decoder.js b/packages/grpc-js/benchmarks/bench/stream-decoder.js
new file mode 100644
index 000000000..62416a4da
--- /dev/null
+++ b/packages/grpc-js/benchmarks/bench/stream-decoder.js
@@ -0,0 +1,138 @@
+const { benchmark, createBenchmarkSuite } = require('../common');
+const { serializeMessage } = require('../helpers/encode');
+const { echoService } = require('../helpers/utils');
+const {
+  StreamDecoder: OGStreamDecoder,
+} = require('@grpc/grpc-js/build/src/stream-decoder');
+const {
+  StreamDecoder: NewStreamDecoder,
+  decoder: decoderManager,
+} = require('../../build/src/stream-decoder');
+
+const serializedSmallBinary = serializeMessage(
+  echoService.service.Echo.requestSerialize,
+  {
+    value: 'string-val',
+    value2: 10,
+  }
+);
+const getSmallBinary = () => {
+  const buf = Buffer.allocUnsafe(serializedSmallBinary.length);
+  serializedSmallBinary.copy(buf);
+  return buf;
+};
+
+const getSmallSplit = () => {
+  const binary = getSmallBinary();
+  return [binary.subarray(0, 3), binary.subarray(3, 5), binary.subarray(5)];
+};
+
+const largeObj = {
+  value: 'a'.repeat(2 ** 16),
+  value2: 12803182109,
+};
+const serializedLargeObj = serializeMessage(
+  echoService.service.Echo.requestSerialize,
+  largeObj
+);
+
+const getLargeBinary = () => {
+  const buf = Buffer.allocUnsafeSlow(serializedLargeObj.length);
+  serializedLargeObj.copy(buf);
+  return buf;
+};
+
+const getLargeSplit = () => {
+  const binary = getLargeBinary();
+  return [
+    binary.subarray(0, Math.ceil(Buffer.poolSize * 0.5)),
+    binary.subarray(Math.ceil(Buffer.poolSize * 0.5)),
+  ];
+};
+
+const originalCached = new OGStreamDecoder();
+const currentCached = decoderManager.get();
+
+createBenchmarkSuite('Small Payload')
+  // mark -- original decoder, fresh copies
+  .add('1.10.6', function () {
+    const decoder = new OGStreamDecoder();
+    decoder.write(getSmallBinary());
+  })
+  .add('1.10.6 cached', function () {
+    // reuse the shared decoder instance so this measures the cached path
+    originalCached.write(getSmallBinary());
+  })
+  .add('current', function () {
+    const decoder = new NewStreamDecoder();
+    decoder.write(getSmallBinary());
+  })
+  .add('current cached', function () {
+    currentCached.write(getSmallBinary());
+  });
+
+createBenchmarkSuite('Small Payload Chunked')
+  .add('1.10.6', function () {
+    const decoder = new OGStreamDecoder();
+    for (const item of getSmallSplit()) {
+      decoder.write(item);
+    }
+  })
+  .add('1.10.6 cached', function () {
+    for (const item of getSmallSplit()) {
+      originalCached.write(item);
+    }
+  })
+  .add('current', function () {
+    const decoder = new NewStreamDecoder();
+    for (const item of getSmallSplit()) {
+      decoder.write(item);
+    }
+  })
+  .add('current cached', function () {
+    for (const item of getSmallSplit()) {
+      currentCached.write(item);
+    }
+  });
+
+createBenchmarkSuite('Large Payload')
+  .add('1.10.6', function () {
+    const decoder = new OGStreamDecoder();
+    decoder.write(getLargeBinary());
+  })
+  .add('1.10.6 cached', function () {
+    originalCached.write(getLargeBinary());
+  })
+  .add('current', function () {
+    const decoder = new NewStreamDecoder();
+    decoder.write(getLargeBinary());
+  })
+  .add('current cached', function () {
+    currentCached.write(getLargeBinary());
+  });
+
+createBenchmarkSuite('Large Payload Chunked')
+  .add('1.10.6', function () {
+    const decoder = new OGStreamDecoder();
+    for (const item of getLargeSplit()) {
+      decoder.write(item);
+    }
+  })
+  .add('1.10.6 cached', function () {
+    for (const item of getLargeSplit()) {
+      originalCached.write(item);
+    }
+  })
+  .add('current', function () {
+    const decoder = new NewStreamDecoder();
+    for (const item of getLargeSplit()) {
+      decoder.write(item);
+    }
+  })
+  .add('current cached', function () {
+    for (const item of getLargeSplit()) {
+      currentCached.write(item);
+    }
+  });
+
+benchmark.run();
diff --git a/packages/grpc-js/benchmarks/common.js b/packages/grpc-js/benchmarks/common.js
new file mode 100644
index 000000000..1b9afda6a
--- /dev/null
+++ b/packages/grpc-js/benchmarks/common.js
@@ -0,0 +1,14 @@
+const Benchmarkify = require('benchmarkify');
+
+const benchmark = new Benchmarkify('grpc-js benchmarks').printHeader();
+
+function createBenchmarkSuite(name) {
+  const suite = benchmark.createSuite(name);
+
+  return suite;
+}
+
+module.exports = {
+  benchmark,
+  createBenchmarkSuite,
+};
diff --git a/packages/grpc-js/benchmarks/echo-unary.bin b/packages/grpc-js/benchmarks/echo-unary.bin
new file mode 100644
index 000000000..5d8a648c1
Binary files /dev/null and b/packages/grpc-js/benchmarks/echo-unary.bin differ
diff --git a/packages/grpc-js/benchmarks/helpers/encode.js b/packages/grpc-js/benchmarks/helpers/encode.js
new file mode 100644
index 000000000..ced6fdc04
--- /dev/null
+++ b/packages/grpc-js/benchmarks/helpers/encode.js
@@ -0,0 +1,41 @@
+const fs = require('node:fs');
+const { resolve } = require('node:path');
+const { echoService } = require('./utils');
+
+/**
+ * Serialize a message to a length-delimited byte string.
+ * @param value
+ * @returns
+ */
+function serializeMessage(serialize, value) {
+  const messageBuffer = serialize(value);
+  const byteLength = messageBuffer.byteLength;
+  const output = Buffer.allocUnsafe(byteLength + 5);
+  /* Note: response compression is currently not supported, so this
+   * compressed bit is always 0. */
+  output.writeUInt8(0, 0);
+  output.writeUInt32BE(byteLength, 1);
+  messageBuffer.copy(output, 5);
+  return output;
+}
+
+const binaryMessage = serializeMessage(
+  echoService.service.Echo.requestSerialize,
+  {
+    value: 'string-val',
+    value2: 10,
+  }
+);
+
+if (require.main === module) {
+  console.log(
+    'Service %s\nEcho binary bytes: %d, hex: %s',
+    echoService.service.Echo.path,
+    binaryMessage.length,
+    binaryMessage.toString('hex')
+  );
+
+  fs.writeFileSync(resolve(__dirname, '../echo-unary.bin'), binaryMessage);
+}
+
+exports.serializeMessage = serializeMessage;
diff --git a/packages/grpc-js/benchmarks/helpers/utils.js b/packages/grpc-js/benchmarks/helpers/utils.js
new file mode 100644
index 000000000..ecca11af4
--- /dev/null
+++ b/packages/grpc-js/benchmarks/helpers/utils.js
@@ -0,0 +1,28 @@
+const loader = require('@grpc/proto-loader');
+const path = require('node:path');
+
+// eslint-disable-next-line node/no-unpublished-import
+const { loadPackageDefinition } = require('../../build/src/make-client');
+
+const protoLoaderOptions = {
+  keepCase: true,
+  longs: String,
+  enums: String,
+  defaults: true,
+  oneofs: true,
+};
+
+function loadProtoFile(file) {
+  const packageDefinition = loader.loadSync(file, protoLoaderOptions);
+  return loadPackageDefinition(packageDefinition);
+}
+
+const protoFile = path.join(
+  __dirname,
+  '../../test/fixtures',
+  'echo_service.proto'
+);
+const echoService = loadProtoFile(protoFile).EchoService;
+
+exports.loadProtoFile = loadProtoFile;
+exports.echoService = echoService;
diff --git a/packages/grpc-js/benchmarks/package.json b/packages/grpc-js/benchmarks/package.json
new file mode 100644
index 000000000..e88845616
--- /dev/null
+++ b/packages/grpc-js/benchmarks/package.json
@@ -0,0 +1,21 @@
+{
+  "name": "benchmarks",
+  "private": true,
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "devDependencies": {
+    "@grpc/grpc-js": "1.10.6"
+  },
+  "dependencies": {
+    "@grpc/proto-loader": "^0.7.12",
+    "benchmark": "^2.1.4",
+    "benchmarkify": "^4.0.0"
+  }
+}
diff --git a/packages/grpc-js/benchmarks/server-old.js b/packages/grpc-js/benchmarks/server-old.js
new file mode 100644
index 000000000..3bb72b40b
--- /dev/null
+++ b/packages/grpc-js/benchmarks/server-old.js
@@ -0,0 +1,38 @@
+/* eslint-disable node/no-unpublished-import */
+const { Server, ServerCredentials } = require('@grpc/grpc-js');
+const { echoService } = require('./helpers/utils');
+
+const serviceImpl = {
+  echo: (call, callback) => {
+    callback(null, call.request);
+  },
+};
+
+async function main() {
+  const server = new Server({
+    'grpc.enable_channelz': 0,
+  });
+
+  server.addService(echoService.service, serviceImpl);
+
+  const credentials = ServerCredentials.createInsecure();
+
+  setInterval(
+    () => console.log(`RSS: ${process.memoryUsage().rss / 1024 / 1024} MiB`),
+    5e3
+  ).unref();
+
+  await new Promise((resolve, reject) => {
+    server.bindAsync('localhost:9999', credentials, (error, port) => {
+      if (error) {
+        reject(error);
+        return;
+      }
+
+      console.log('server listening on port %d', port);
+      resolve();
+    });
+  });
+}
+
+main();
diff --git a/packages/grpc-js/benchmarks/server.js b/packages/grpc-js/benchmarks/server.js
new file mode 100644
index 000000000..c9e5948c4
--- /dev/null
+++ b/packages/grpc-js/benchmarks/server.js
@@ -0,0 +1,38 @@
+/* eslint-disable node/no-unpublished-import */
+const { Server, ServerCredentials } = require('../build/src/index');
+const { echoService } = require('./helpers/utils');
+
+const serviceImpl = {
+  echo: (call, callback) => {
+    callback(null, call.request);
+  },
+};
+
+async function main() {
+  const server = new Server({
+    'grpc.enable_channelz': 0,
+  });
+
+  server.addService(echoService.service, serviceImpl);
+
+  const credentials = ServerCredentials.createInsecure();
+
+  setInterval(
+    () => console.log(`RSS: ${process.memoryUsage().rss / 1024 / 1024} MiB`),
+    5e3
+  ).unref();
+
+  await new Promise((resolve, reject) => {
+    server.bindAsync('localhost:9999', credentials, (error, port) => {
+      if (error) {
+        reject(error);
+        return;
+      }
+
+      console.log('server listening on port %d', port);
+      resolve();
+    });
+  });
+}
+
+main();
diff --git a/packages/grpc-js/package.json b/packages/grpc-js/package.json
index 3b8ccf2c1..5c9b12c69 100644
--- a/packages/grpc-js/package.json
+++ b/packages/grpc-js/package.json
@@ -2,6 +2,7 @@
   "name": "@grpc/grpc-js",
   "version": "1.10.6",
   "description": "gRPC Library for Node - pure JS implementation",
+  "module": "commonjs",
   "homepage": "https://grpc.io/",
   "repository": "https://github.com/grpc/grpc-node/tree/master/packages/grpc-js",
   "main": "build/src/index.js",
@@ -20,12 +21,12 @@
     "@types/lodash": "^4.14.202",
     "@types/mocha": "^10.0.6",
     "@types/ncp": "^2.0.8",
-    "@types/node": ">=20.11.20",
+    "@types/node": "^20.12.7",
     "@types/pify": "^5.0.4",
     "@types/semver": "^7.5.8",
-    "@typescript-eslint/eslint-plugin": "^7.1.0",
-    "@typescript-eslint/parser": "^7.1.0",
-    "@typescript-eslint/typescript-estree": "^7.1.0",
+    "@typescript-eslint/eslint-plugin": "^7.7.1",
+    "@typescript-eslint/parser": "^7.7.1",
+    "@typescript-eslint/typescript-estree": "^7.7.1",
     "clang-format": "^1.8.0",
     "eslint": "^8.42.0",
     "eslint-config-prettier": "^8.8.0",
@@ -40,10 +41,10 @@
     "ncp": "^2.0.0",
     "pify": "^4.0.1",
     "prettier": "^2.8.8",
-    "rimraf": "^3.0.2",
+    "rimraf": "^5.0.5",
     "semver": "^7.6.0",
     "ts-node": "^10.9.2",
-    "typescript": "^5.3.3"
+    "typescript": "^5.4.5"
   },
   "contributors": [
     {
@@ -66,8 +67,9 @@
     "generate-test-types": "proto-loader-gen-types --keepCase --longs String --enums String --defaults --oneofs --includeComments --include-dirs test/fixtures/ -O test/generated/ --grpcLib ../../src/index test_service.proto"
   },
   "dependencies": {
-    "@grpc/proto-loader": "^0.7.10",
-    "@js-sdsl/ordered-map": "^4.4.2"
+    "@grpc/proto-loader": "^0.7.12",
+    "@js-sdsl/ordered-map": "^4.4.2",
+    "reusify": "^1.0.4"
   },
   "files": [
     "src/**/*.ts",
diff --git a/packages/grpc-js/src/metadata.ts b/packages/grpc-js/src/metadata.ts
index eabd2dff4..81d27b45d 100644
--- a/packages/grpc-js/src/metadata.ts
+++ b/packages/grpc-js/src/metadata.ts
@@ -19,11 +19,16 @@ import * as http2 from 'http2';
 import { log } from './logging';
 import { LogVerbosity } from './constants';
 import { getErrorMessage } from './error';
+
 const LEGAL_KEY_REGEX = /^[0-9a-z_.-]+$/;
 const LEGAL_NON_BINARY_VALUE_REGEX = /^[ -~]*$/;
+const { isArray } = Array;
+// const { hasOwnProperty } = Object.prototype;
 
 export type MetadataValue = string | Buffer;
-export type MetadataObject = Map<string, MetadataValue[]>;
+export interface MetadataObject {
+  [key: string]: MetadataValue[] | undefined;
+}
 
 function isLegalKey(key: string): boolean {
   return LEGAL_KEY_REGEX.test(key);
@@ -33,12 +38,14 @@ function isLegalNonBinaryValue(value: string): boolean {
   return LEGAL_NON_BINARY_VALUE_REGEX.test(value);
 }
 
+// https://github.com/RafaelGSS/nodejs-bench-operations/blob/main/RESULTS-v20.md#endswith-comparison
 function isBinaryKey(key: string): boolean {
-  return key.endsWith('-bin');
+  // return key.endsWith('-bin');
+  return key.slice(-4) === '-bin';
 }
 
 function isCustomMetadata(key: string): boolean {
-  return !key.startsWith('grpc-');
+  return key.slice(0, 5) !== 'grpc-';
 }
 
 function normalizeKey(key: string): string {
@@ -70,6 +77,42 @@ function validate(key: string, value?: MetadataValue): void {
   }
 }
 
+function validateString(key: string, value: string): void {
+  if (!isLegalKey(key)) {
+    throw new Error('Metadata key "' + key + '" contains illegal characters');
+  }
+
+  if (!isLegalNonBinaryValue(value)) {
+    throw new Error(
+      'Metadata string value "' + value + '" contains illegal characters'
+    );
+  }
+}
+
+function validateStrings(key: string, values: string[]): void {
+  if (!isLegalKey(key)) {
+    throw new Error('Metadata key "' + key + '" contains illegal characters');
+  }
+
+  for (let i = 0; i < values.length; i += 1) {
+    if (!isLegalNonBinaryValue(values[i])) {
+      throw new Error(
+        'Metadata string value "' + values[i] + '" contains illegal characters'
+      );
+    }
+  }
+}
+
+function validateBinary(key: string): void {
+  if (!isLegalKey(key)) {
+    throw new Error('Metadata key "' + key + '" contains illegal characters');
+  }
+
+  if (!isBinaryKey(key)) {
+    throw new Error("keys that end with '-bin' must have Buffer values");
+  }
+}
+
 export interface MetadataOptions {
   /* Signal that the request is idempotent. Defaults to false */
   idempotentRequest?: boolean;
@@ -83,14 +126,25 @@ export interface MetadataOptions {
   corked?: boolean;
 }
 
+function MetadataObject() {}
+MetadataObject.prototype = Object.create(null);
+
 /**
  * A class for storing metadata. Keys are normalized to lowercase ASCII.
  */
 export class Metadata {
-  protected internalRepr: MetadataObject = new Map<string, MetadataValue[]>();
+  // @ts-expect-error - cached object
+  protected internalRepr: MetadataObject = new MetadataObject();
   private options: MetadataOptions;
 
-  constructor(options: MetadataOptions = {}) {
+  constructor(
+    options: MetadataOptions = {
+      idempotentRequest: false,
+      waitForReady: false,
+      cacheableRequest: false,
+      corked: false,
+    }
+  ) {
     this.options = options;
   }
 
@@ -104,7 +158,7 @@ export class Metadata {
   set(key: string, value: MetadataValue): void {
     key = normalizeKey(key);
     validate(key, value);
-    this.internalRepr.set(key, [value]);
+    this.internalRepr[key] = [value];
   }
 
   /**
@@ -118,24 +172,46 @@ export class Metadata {
     key = normalizeKey(key);
     validate(key, value);
 
-    const existingValue: MetadataValue[] | undefined =
-      this.internalRepr.get(key);
+    const existingValue: MetadataValue[] | undefined = this.internalRepr[key];
 
     if (existingValue === undefined) {
-      this.internalRepr.set(key, [value]);
+      this.internalRepr[key] = [value];
     } else {
       existingValue.push(value);
     }
   }
 
+  addString(key: string, value: string): void {
+    validateString(key, value);
+    this.internalRepr[key] = [value];
+  }
+
+  addStrings(key: string, values: string[]): void {
+    validateStrings(key, values);
+    this.internalRepr[key] = values.slice(); // shallow copy
+  }
+
+  addBuffer(key: string, value: Buffer): void {
+    validateBinary(key);
+    this.internalRepr[key] = [value];
+  }
+
+  addBuffers(key: string, values: Buffer[]): void {
+    validateBinary(key);
+    this.internalRepr[key] = values;
+  }
+
   /**
    * Removes the given key and any associated values. Normalizes the key.
    * @param key The key whose values should be removed.
    */
   remove(key: string): void {
-    key = normalizeKey(key);
+    const k = normalizeKey(key);
     // validate(key);
-    this.internalRepr.delete(key);
+    const { internalRepr } = this;
+    if (k in internalRepr) {
+      internalRepr[k] = undefined; // expensive, but cheaper in new versions
+    }
   }
 
   /**
@@ -144,9 +220,7 @@ export class Metadata {
    * @return A list of values associated with the given key.
    */
   get(key: string): MetadataValue[] {
-    key = normalizeKey(key);
-    // validate(key);
-    return this.internalRepr.get(key) || [];
+    return this.internalRepr[normalizeKey(key)] || [];
   }
 
   /**
@@ -156,10 +230,16 @@ export class Metadata {
    */
   getMap(): { [key: string]: MetadataValue } {
     const result: { [key: string]: MetadataValue } = {};
-
-    for (const [key, values] of this.internalRepr) {
-      if (values.length > 0) {
-        const v = values[0];
+    const keys = Object.keys(this.internalRepr);
+
+    let values;
+    let key;
+    let v;
+    for (let i = 0; i < keys.length; i += 1) {
+      key = keys[i];
+      values = this.internalRepr[key];
+      if (values !== undefined && values.length > 0) {
+        v = values[0];
         result[key] = Buffer.isBuffer(v) ? Buffer.from(v) : v;
       }
     }
@@ -173,17 +253,24 @@ export class Metadata {
   clone(): Metadata {
     const newMetadata = new Metadata(this.options);
     const newInternalRepr = newMetadata.internalRepr;
+    const keys = Object.keys(this.internalRepr);
+
+    let values;
+    let key;
+    for (let i = 0; i < keys.length; i += 1) {
+      key = keys[i];
+      values = this.internalRepr[key];
+      if (values !== undefined) {
+        const clonedValue: MetadataValue[] = values.map(v => {
+          if (Buffer.isBuffer(v)) {
+            return Buffer.from(v);
+          } else {
+            return v;
+          }
+        });
 
-    for (const [key, value] of this.internalRepr) {
-      const clonedValue: MetadataValue[] = value.map(v => {
-        if (Buffer.isBuffer(v)) {
-          return Buffer.from(v);
-        } else {
-          return v;
-        }
-      });
-
-      newInternalRepr.set(key, clonedValue);
+        newInternalRepr[key] = clonedValue;
+      }
     }
 
     return newMetadata;
@@ -197,12 +284,19 @@ export class Metadata {
    * @param other A Metadata object.
    */
   merge(other: Metadata): void {
-    for (const [key, values] of other.internalRepr) {
+    const keys = Object.keys(other.internalRepr);
+
+    let values;
+    let key;
+    for (let i = 0; i < keys.length; i += 1) {
+      key = keys[i];
+      values = other.internalRepr[key] || [];
+
       const mergedValue: MetadataValue[] = (
-        this.internalRepr.get(key) || []
+        this.internalRepr[key] || []
       ).concat(values);
 
-      this.internalRepr.set(key, mergedValue);
+      this.internalRepr[key] = mergedValue;
     }
   }
 
@@ -218,13 +312,14 @@ export class Metadata {
    * Creates an OutgoingHttpHeaders object that can be used with the http2 API.
    */
   toHttp2Headers(): http2.OutgoingHttpHeaders {
-    // NOTE: Node <8.9 formats http2 headers incorrectly.
-    const result: http2.OutgoingHttpHeaders = {};
+    const result: http2.OutgoingHttpHeaders = Object.create(null);
+    const o = this.internalRepr;
 
-    for (const [key, values] of this.internalRepr) {
-      // We assume that the user's interaction with this object is limited to
-      // through its public API (i.e. keys and values are already validated).
-      result[key] = values.map(bufToString);
+    for (const k in o) {
+      const cur = o[k];
+      if (cur !== undefined) {
+        result[k] = isBinaryKey(k) ? cur.map(bufToString) : (cur as string[]);
+      }
     }
 
     return result;
@@ -236,8 +331,16 @@ export class Metadata {
    */
   toJSON() {
     const result: { [key: string]: MetadataValue[] } = {};
-    for (const [key, values] of this.internalRepr) {
-      result[key] = values;
+    const keys = Object.keys(this.internalRepr);
+
+    let values;
+    let key;
+    for (let i = 0; i < keys.length; i += 1) {
+      key = keys[i];
+      values = this.internalRepr[key];
+      if (values !== undefined) {
+        result[key] = values;
+      }
     }
     return result;
   }
@@ -249,38 +352,25 @@ export class Metadata {
    */
   static fromHttp2Headers(headers: http2.IncomingHttpHeaders): Metadata {
     const result = new Metadata();
-    for (const key of Object.keys(headers)) {
+    const keys = Object.keys(headers);
+
+    let key: string;
+    let values: string | string[] | undefined;
+
+    for (let i = 0; i < keys.length; i += 1) {
+      key = keys[i];
       // Reserved headers (beginning with `:`) are not valid keys.
       if (key.charAt(0) === ':') {
         continue;
       }
 
-      const values = headers[key];
+      values = headers[key];
+      if (values === undefined) {
+        continue;
+      }
 
       try {
-        if (isBinaryKey(key)) {
-          if (Array.isArray(values)) {
-            values.forEach(value => {
-              result.add(key, Buffer.from(value, 'base64'));
-            });
-          } else if (values !== undefined) {
-            if (isCustomMetadata(key)) {
-              values.split(',').forEach(v => {
-                result.add(key, Buffer.from(v.trim(), 'base64'));
-              });
-            } else {
-              result.add(key, Buffer.from(values, 'base64'));
-            }
-          }
-        } else {
-          if (Array.isArray(values)) {
-            values.forEach(value => {
-              result.add(key, value);
-            });
-          } else if (values !== undefined) {
-            result.add(key, values);
-          }
-        }
+        handleMetadataValue(result, key, values);
       } catch (error) {
         const message = `Failed to add metadata entry ${key}: ${values}. ${getErrorMessage(
           error
@@ -293,6 +383,32 @@ export class Metadata {
   }
 }
 
+function handleMetadataValue(
+  result: Metadata,
+  key: string,
+  values: string | string[]
+): void {
+  if (isBinaryKey(key)) {
+    if (isArray(values)) {
+      result.addBuffers(key, values.map(toBufferFromBase64));
+    } else if (isCustomMetadata(key)) {
+      result.addBuffers(key, values.split(',').map(toBufferFromBase64Trim));
+    } else {
+      result.addBuffer(key, toBufferFromBase64(values));
+    }
+  } else {
+    if (isArray(values)) {
+      result.addStrings(key, values);
+    } else {
+      result.addString(key, values);
+    }
+  }
+}
+
 const bufToString = (val: string | Buffer): string => {
   return Buffer.isBuffer(val) ? val.toString('base64') : val;
 };
+
+const toBufferFromBase64 = (v: string): Buffer => Buffer.from(v, 'base64');
+const toBufferFromBase64Trim = (v: string): Buffer =>
+  Buffer.from(v.trim(), 'base64');
diff --git a/packages/grpc-js/src/server-interceptors.ts b/packages/grpc-js/src/server-interceptors.ts
index b62d55108..6a47c5c67 100644
--- a/packages/grpc-js/src/server-interceptors.ts
+++ b/packages/grpc-js/src/server-interceptors.ts
@@ -31,7 +31,7 @@ import * as http2 from 'http2';
 import { getErrorMessage } from './error';
 import * as zlib from 'zlib';
 import { promisify } from 'util';
-import { StreamDecoder } from './stream-decoder';
+import { GrpcFrame, decoder } from './stream-decoder';
 import { CallEventTracker } from './transport';
 import * as logging from './logging';
 
@@ -44,6 +44,8 @@ function trace(text: string) {
   logging.trace(LogVerbosity.DEBUG, TRACER_NAME, text);
 }
 
+type GrpcWriteFrame = [header: Buffer, message: Buffer];
+
 export interface ServerMetadataListener {
   (metadata: Metadata, next: (metadata: Metadata) => void): void;
 }
@@ -478,7 +480,7 @@ type ReadQueueEntryType = 'COMPRESSED' | 'READABLE' | 'HALF_CLOSE';
 
 interface ReadQueueEntry {
   type: ReadQueueEntryType;
-  compressedMessage: Buffer | null;
+  compressedMessage: GrpcFrame | null;
   parsedMessage: any;
 }
 
@@ -496,7 +498,7 @@ export class BaseServerInterceptingCall
   private wantTrailers = false;
   private cancelNotified = false;
   private incomingEncoding = 'identity';
-  private decoder = new StreamDecoder();
+  private decoder = decoder.get();
   private readQueue: ReadQueueEntry[] = [];
   private isReadPending = false;
   private receivedHalfClose = false;
@@ -536,6 +538,9 @@ export class BaseServerInterceptingCall
       }
 
       this.notifyOnCancel();
+
+      // release current decoder
+      decoder.release(this.decoder);
     });
 
     this.stream.on('data', (data: Buffer) => {
@@ -632,7 +637,7 @@ export class BaseServerInterceptingCall
     }
     this.cancelNotified = true;
     this.cancelled = true;
-    process.nextTick(() => {
+    queueMicrotask(() => {
       this.listener?.onCancel();
     });
     if (this.deadlineTimer) {
@@ -658,35 +663,33 @@ export class BaseServerInterceptingCall
    * @param value
    * @returns
    */
-  private serializeMessage(value: any) {
+  private serializeMessage(value: any): GrpcWriteFrame {
     const messageBuffer = this.handler.serialize(value);
-    const byteLength = messageBuffer.byteLength;
-    const output = Buffer.allocUnsafe(byteLength + 5);
-    /* Note: response compression is currently not supported, so this
-     * compressed bit is always 0. */
-    output.writeUInt8(0, 0);
-    output.writeUInt32BE(byteLength, 1);
-    messageBuffer.copy(output, 5);
-    return output;
+    const { byteLength } = messageBuffer;
+
+    const header = Buffer.allocUnsafe(5);
+    header.writeUint8(0, 0);
+    header.writeUint32BE(byteLength, 1);
+
+    return [header, messageBuffer];
   }
 
   private decompressMessage(
     message: Buffer,
     encoding: string
-  ): Buffer | Promise<Buffer> {
-    switch (encoding) {
-      case 'deflate':
-        return inflate(message.subarray(5));
-      case 'gzip':
-        return unzip(message.subarray(5));
-      case 'identity':
-        return message.subarray(5);
-      default:
-        return Promise.reject({
-          code: Status.UNIMPLEMENTED,
-          details: `Received message compressed with unsupported encoding "${encoding}"`,
-        });
+  ): Promise<Buffer> {
+    if (encoding === 'deflate') {
+      return inflate(message);
     }
+
+    if (encoding === 'gzip') {
+      return unzip(message);
+    }
+
+    throw {
+      code: Status.UNIMPLEMENTED,
+      details: `Received message compressed with unsupported encoding "${encoding}"`,
+    };
   }
 
   private async decompressAndMaybePush(queueEntry: ReadQueueEntry) {
@@ -694,23 +697,23 @@ export class BaseServerInterceptingCall
       throw new Error(`Invalid queue entry type: ${queueEntry.type}`);
     }
 
-    const compressed = queueEntry.compressedMessage!.readUInt8(0) === 1;
-    const compressedMessageEncoding = compressed
-      ? this.incomingEncoding
-      : 'identity';
-    const decompressedMessage = await this.decompressMessage(
-      queueEntry.compressedMessage!,
-      compressedMessageEncoding
-    );
+    const msg = queueEntry.compressedMessage!;
+    const compressed = msg!.compressed === 1;
+
     try {
+      const decompressedMessage = compressed
+        ? await this.decompressMessage(msg.message, this.incomingEncoding)
+        : msg.message;
+
       queueEntry.parsedMessage = this.handler.deserialize(decompressedMessage);
-    } catch (err) {
+    } catch (err: any) {
       this.sendStatus({
-        code: Status.INTERNAL,
-        details: `Error deserializing request: ${(err as Error).message}`,
+        code: err.code || Status.INTERNAL,
+        details: err.details || `Error deserializing request: ${err.message}`,
       });
       return;
     }
+
     queueEntry.type = 'READABLE';
     this.maybePushNextMessage();
   }
@@ -743,26 +746,27 @@ export class BaseServerInterceptingCall
         ' received data frame of size ' +
         data.length
     );
-    const rawMessages = this.decoder.write(data);
+    const rawMessages: GrpcFrame[] = this.decoder.write(data);
 
-    for (const messageBytes of rawMessages) {
+    if (rawMessages.length > 0) {
       this.stream.pause();
+    }
+
+    for (const message of rawMessages) {
       if (
         this.maxReceiveMessageSize !== -1 &&
-        messageBytes.length - 5 > this.maxReceiveMessageSize
+        message.size > this.maxReceiveMessageSize
       ) {
         this.sendStatus({
           code: Status.RESOURCE_EXHAUSTED,
-          details: `Received message larger than max (${
-            messageBytes.length - 5
-          } vs. ${this.maxReceiveMessageSize})`,
+          details: `Received message larger than max (${message.size} vs. ${this.maxReceiveMessageSize})`,
           metadata: null,
         });
         return;
       }
       const queueEntry: ReadQueueEntry = {
         type: 'COMPRESSED',
-        compressedMessage: messageBytes,
+        compressedMessage: message,
         parsedMessage: null,
       };
       this.readQueue.push(queueEntry);
@@ -809,7 +813,7 @@ export class BaseServerInterceptingCall
     if (this.checkCancelled()) {
       return;
     }
-    let response: Buffer;
+    let response: GrpcWriteFrame;
     try {
       response = this.serializeMessage(message);
     } catch (e) {
@@ -823,11 +827,11 @@ export class BaseServerInterceptingCall
 
     if (
       this.maxSendMessageSize !== -1 &&
-      response.length - 5 > this.maxSendMessageSize
+      response[1].length > this.maxSendMessageSize
     ) {
       this.sendStatus({
         code: Status.RESOURCE_EXHAUSTED,
-        details: `Sent message larger than max (${response.length} vs. ${this.maxSendMessageSize})`,
+        details: `Sent message larger than max (${response[1].length} vs. ${this.maxSendMessageSize})`,
         metadata: null,
       });
       return;
@@ -837,9 +841,13 @@ export class BaseServerInterceptingCall
       'Request to ' +
         this.handler.path +
         ' sent data frame of size ' +
-        response.length
+        response[1].length
     );
-    this.stream.write(response, error => {
+    const { stream } = this;
+
+    // TODO: measure cork() / uncork() ?
+    stream.write(response[0]);
+    stream.write(response[1], error => {
       if (error) {
         this.sendStatus({
           code: Status.INTERNAL,
diff --git a/packages/grpc-js/src/server.ts b/packages/grpc-js/src/server.ts
index feb511b41..e988f2ce4 100644
--- a/packages/grpc-js/src/server.ts
+++ b/packages/grpc-js/src/server.ts
@@ -1057,6 +1057,8 @@ export class Server {
   /**
    * @deprecated No longer needed as of version 1.10.x
    */
+  // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+  // @ts-ignore
   @deprecate(
     'Calling start() is no longer necessary. It can be safely omitted.'
   )
diff --git a/packages/grpc-js/src/stream-decoder.ts b/packages/grpc-js/src/stream-decoder.ts
index 671ad41ae..1d36701cf 100644
--- a/packages/grpc-js/src/stream-decoder.ts
+++ b/packages/grpc-js/src/stream-decoder.ts
@@ -15,91 +15,166 @@
  *
  */
 
-enum ReadState {
+// @ts-expect-error no types
+import * as reusify from 'reusify';
+export interface GrpcFrame {
+  compressed: number;
+  size: number;
+  message: Buffer;
+}
+
+const enum ReadState {
   NO_DATA,
   READING_SIZE,
   READING_MESSAGE,
 }
 
-export class StreamDecoder {
-  private readState: ReadState = ReadState.NO_DATA;
-  private readCompressFlag: Buffer = Buffer.alloc(1);
-  private readPartialSize: Buffer = Buffer.alloc(4);
-  private readSizeRemaining = 4;
-  private readMessageSize = 0;
-  private readPartialMessage: Buffer[] = [];
-  private readMessageRemaining = 0;
+const kMessageSizeBytes = 4 as const;
+const kEmptyMessage = Buffer.alloc(0);
+
+interface StreamDecoder {
+  next: StreamDecoder | null;
+  readState: ReadState;
+  readCompressFlag: number;
+  readPartialSize: Buffer;
+  readSizeRemaining: number;
+  readMessageSize: number;
+  readPartialMessage: Buffer | null;
+  readMessageRemaining: number;
+
+  write(data: Buffer): GrpcFrame[];
+}
+
+export function StreamDecoder(this: StreamDecoder) {
+  // reusify reference
+  this.next = null;
+
+  // internal state
+  this.readState = ReadState.NO_DATA;
+  this.readCompressFlag = 0;
+  this.readPartialSize = Buffer.alloc(kMessageSizeBytes);
+  this.readSizeRemaining = kMessageSizeBytes;
+  this.readMessageSize = 0;
+  this.readPartialMessage = null;
+  this.readMessageRemaining = 0;
 
-  write(data: Buffer): Buffer[] {
+  // eslint-disable-next-line @typescript-eslint/no-this-alias
+  const that = this;
+
+  this.write = function decodeInputBufferStream(
+    this: undefined,
+    data: Buffer
+  ): GrpcFrame[] {
     let readHead = 0;
-    let toRead: number;
-    const result: Buffer[] = [];
-
-    while (readHead < data.length) {
-      switch (this.readState) {
-        case ReadState.NO_DATA:
-          this.readCompressFlag = data.slice(readHead, readHead + 1);
-          readHead += 1;
-          this.readState = ReadState.READING_SIZE;
-          this.readPartialSize.fill(0);
-          this.readSizeRemaining = 4;
-          this.readMessageSize = 0;
-          this.readMessageRemaining = 0;
-          this.readPartialMessage = [];
-          break;
-        case ReadState.READING_SIZE:
-          toRead = Math.min(data.length - readHead, this.readSizeRemaining);
+    let toRead = 0;
+    const result: GrpcFrame[] = [];
+    const len = data.length;
+
+    while (readHead < len) {
+      const { readState } = that;
+      if (readState === ReadState.NO_DATA) {
+        that.readCompressFlag = data.readUint8(readHead);
+        readHead += 1;
+        that.readState = ReadState.READING_SIZE;
+
+        // size prop
+        that.readSizeRemaining = kMessageSizeBytes;
+
+        // message body props
+        that.readMessageSize = 0;
+        that.readMessageRemaining = 0;
+        that.readPartialMessage = null;
+      } else if (readState === ReadState.READING_SIZE) {
+        let { readSizeRemaining } = that;
+        toRead = Math.min(len - readHead, readSizeRemaining);
+
+        // we read everything in 1 go
+        if (toRead === kMessageSizeBytes) {
+          that.readMessageSize = data.readUInt32BE(readHead);
+          readSizeRemaining = 0;
+        } else {
+          // we only have partial bytes available to us
           data.copy(
-            this.readPartialSize,
-            4 - this.readSizeRemaining,
+            that.readPartialSize,
+            kMessageSizeBytes - readSizeRemaining,
             readHead,
             readHead + toRead
           );
-          this.readSizeRemaining -= toRead;
-          readHead += toRead;
-          // readSizeRemaining >=0 here
-          if (this.readSizeRemaining === 0) {
-            this.readMessageSize = this.readPartialSize.readUInt32BE(0);
-            this.readMessageRemaining = this.readMessageSize;
-            if (this.readMessageRemaining > 0) {
-              this.readState = ReadState.READING_MESSAGE;
-            } else {
-              const message = Buffer.concat(
-                [this.readCompressFlag, this.readPartialSize],
-                5
-              );
-
-              this.readState = ReadState.NO_DATA;
-              result.push(message);
-            }
+
+          readSizeRemaining -= toRead;
+          if (readSizeRemaining === 0) {
+            that.readMessageSize = that.readPartialSize.readUInt32BE(0);
           }
-          break;
-        case ReadState.READING_MESSAGE:
-          toRead = Math.min(data.length - readHead, this.readMessageRemaining);
-          this.readPartialMessage.push(data.slice(readHead, readHead + toRead));
-          this.readMessageRemaining -= toRead;
-          readHead += toRead;
-          // readMessageRemaining >=0 here
-          if (this.readMessageRemaining === 0) {
-            // At this point, we have read a full message
-            const framedMessageBuffers = [
-              this.readCompressFlag,
-              this.readPartialSize,
-            ].concat(this.readPartialMessage);
-            const framedMessage = Buffer.concat(
-              framedMessageBuffers,
-              this.readMessageSize + 5
-            );
-
-            this.readState = ReadState.NO_DATA;
-            result.push(framedMessage);
+        }
+
+        that.readSizeRemaining = readSizeRemaining;
+        readHead += toRead;
+
+        // readSizeRemaining >=0 here
+        if (readSizeRemaining === 0) {
+          const { readMessageSize } = that;
+          if (readMessageSize > 0) {
+            that.readState = ReadState.READING_MESSAGE;
+            that.readMessageRemaining = readMessageSize;
+
+            // allocate buffer / partial message array if we don't have all the data yet
+            if (len - readHead < readMessageSize) {
+              that.readPartialMessage =
+                readMessageSize <= Buffer.poolSize * 0.5
+                  ? Buffer.allocUnsafe(readMessageSize)
+                  : Buffer.allocUnsafeSlow(readMessageSize);
+            }
+          } else {
+            that.readState = ReadState.NO_DATA;
+            result.push({
+              compressed: that.readCompressFlag,
+              size: 0,
+              message: kEmptyMessage,
+            });
           }
-          break;
-        default:
-          throw new Error('Unexpected read state');
+        }
+      } else if (readState === ReadState.READING_MESSAGE) {
+        const { readMessageSize } = that;
+        let { readMessageRemaining } = that;
+        toRead = Math.min(len - readHead, readMessageRemaining);
+
+        if (toRead === readMessageSize) {
+          that.readPartialMessage = data.subarray(readHead, readHead + toRead);
+        } else {
+          data.copy(
+            that.readPartialMessage!,
+            readMessageSize - readMessageRemaining,
+            readHead,
+            readHead + toRead
+          );
+        }
+
+        readMessageRemaining -= toRead;
+        readHead += toRead;
+
+        // readMessageRemaining >=0 here
+        if (readMessageRemaining === 0) {
+          // At this point, we have read a full message
+          result.push({
+            compressed: that.readCompressFlag,
+            size: readMessageSize,
+            message: that.readPartialMessage!,
+          });
+
+          that.readState = ReadState.NO_DATA;
+        } else {
+          that.readMessageRemaining = readMessageRemaining;
+        }
+      } else {
+        throw new Error('Unexpected read state');
       }
     }
 
     return result;
-  }
+  };
 }
+
+export const decoder = reusify(StreamDecoder) as {
+  get(): StreamDecoder;
+  release(decoder: StreamDecoder): void;
+};
diff --git a/packages/grpc-js/src/subchannel-call.ts b/packages/grpc-js/src/subchannel-call.ts
index d54a6bcbf..6befa79d5 100644
--- a/packages/grpc-js/src/subchannel-call.ts
+++ b/packages/grpc-js/src/subchannel-call.ts
@@ -20,7 +20,7 @@ import * as os from 'os';
 
 import { Status } from './constants';
 import { Metadata } from './metadata';
-import { StreamDecoder } from './stream-decoder';
+import { decoder } from './stream-decoder';
 import * as logging from './logging';
 import { LogVerbosity } from './constants';
 import {
@@ -83,7 +83,7 @@ export interface SubchannelCallInterceptingListener
 }
 
 export class Http2SubchannelCall implements SubchannelCall {
-  private decoder = new StreamDecoder();
+  private decoder = decoder.get();
 
   private isReadFilterPending = false;
   private isPushPending = false;
@@ -175,9 +175,16 @@ export class Http2SubchannelCall implements SubchannelCall {
       const messages = this.decoder.write(data);
 
       for (const message of messages) {
-        this.trace('parsed message of length ' + message.length);
+        this.trace('parsed message of length ' + message.size);
         this.callEventTracker!.addMessageReceived();
-        this.tryPush(message);
+
+        // TODO: teach all the client-side interceptors to work with decoded GrpcFrames
+        const messageBytes = Buffer.allocUnsafe(message.size + 5);
+        messageBytes.writeUint8(message.compressed, 0);
+        messageBytes.writeUint32BE(message.size, 1);
+        message.message.copy(messageBytes, 5);
+
+        this.tryPush(messageBytes);
       }
     });
     http2Stream.on('end', () => {
@@ -186,6 +193,8 @@ export class Http2SubchannelCall implements SubchannelCall {
     });
     http2Stream.on('close', () => {
       this.serverEndedCall = true;
+      decoder.release(this.decoder);
+
       /* Use process.next tick to ensure that this code happens after any
        * "error" event that may be emitted at about the same time, so that
        * we can bubble up the error message from that event. */
diff --git a/packages/grpc-js/test/test-metadata.ts b/packages/grpc-js/test/test-metadata.ts
index 44182ef39..d019fa63f 100644
--- a/packages/grpc-js/test/test-metadata.ts
+++ b/packages/grpc-js/test/test-metadata.ts
@@ -18,7 +18,7 @@
 import * as assert from 'assert';
 import * as http2 from 'http2';
 import { range } from 'lodash';
-import { Metadata, MetadataObject, MetadataValue } from '../src/metadata';
+import { Metadata, MetadataObject } from '../src/metadata';
 
 class TestMetadata extends Metadata {
   getInternalRepresentation() {
@@ -272,20 +272,26 @@ describe('Metadata', () => {
       metadata.add('key-bin', Buffer.from(range(16, 32)));
       metadata.add('key-bin', Buffer.from(range(0, 32)));
       const headers = metadata.toHttp2Headers();
-      assert.deepStrictEqual(headers, {
-        key1: ['value1'],
-        key2: ['value2'],
-        key3: ['value3a', 'value3b'],
-        'key-bin': [
-          'AAECAwQFBgcICQoLDA0ODw==',
-          'EBESExQVFhcYGRobHB0eHw==',
-          'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8=',
-        ],
-      });
+      assert.deepStrictEqual(
+        headers,
+        Object.setPrototypeOf(
+          {
+            key1: ['value1'],
+            key2: ['value2'],
+            key3: ['value3a', 'value3b'],
+            'key-bin': [
+              'AAECAwQFBgcICQoLDA0ODw==',
+              'EBESExQVFhcYGRobHB0eHw==',
+              'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8=',
+            ],
+          },
+          null
+        )
+      );
     });
 
     it('creates an empty header object from empty Metadata', () => {
-      assert.deepStrictEqual(metadata.toHttp2Headers(), {});
+      assert.deepStrictEqual(metadata.toHttp2Headers(), Object.create(null));
     });
   });
 
@@ -304,7 +310,7 @@ describe('Metadata', () => {
       };
       const metadataFromHeaders = TestMetadata.fromHttp2Headers(headers);
       const internalRepr = metadataFromHeaders.getInternalRepresentation();
-      const expected: MetadataObject = new Map<string, MetadataValue[]>([
+      const expected: MetadataObject = Object.fromEntries([
         ['key1', ['value1']],
         ['key2', ['value2']],
         ['key3', ['value3a', 'value3b']],
@@ -318,13 +324,13 @@ describe('Metadata', () => {
           ],
         ],
       ]);
-      assert.deepStrictEqual(internalRepr, expected);
+      assert.deepEqual(internalRepr, Object.setPrototypeOf(expected, null));
     });
 
     it('creates an empty Metadata object from empty headers', () => {
       const metadataFromHeaders = TestMetadata.fromHttp2Headers({});
       const internalRepr = metadataFromHeaders.getInternalRepresentation();
-      assert.deepStrictEqual(internalRepr, new Map<string, MetadataValue[]>());
+      assert.deepEqual(internalRepr, Object.create(null));
     });
   });
 });
diff --git a/packages/grpc-js/tsconfig.json b/packages/grpc-js/tsconfig.json
index 763ceda98..549205826 100644
--- a/packages/grpc-js/tsconfig.json
+++ b/packages/grpc-js/tsconfig.json
@@ -10,9 +10,9 @@
     "pretty": true,
     "sourceMap": true,
     "strict": true,
-    "lib": ["es2017"],
+    "alwaysStrict": true,
     "outDir": "build",
-    "target": "es2017",
+    "target": "ES2022",
     "module": "commonjs",
     "resolveJsonModule": true,
     "incremental": true,
diff --git a/packages/grpc-js/tsconfig.modern.json b/packages/grpc-js/tsconfig.modern.json
new file mode 100644
index 000000000..7937cfbb0
--- /dev/null
+++ b/packages/grpc-js/tsconfig.modern.json
@@ -0,0 +1,8 @@
+{
+  "extends": "./tsconfig.json",
+  "compilerOptions": {
+    "target": "ESNext",
+    "experimentalDecorators": true,
+    "emitDecoratorMetadata": true
+  }
+}