fix: ensure binary events can handle no content-type header (#134)
* fix: ensure binary events can handle no content-type header

The fix provided in #118
only covered `receiver.check()`; the change in that case was to add the
`application/json` content type to the sanitized headers if no content
type was specified.

However, `receiver.parse()` did not receive the benefit of this change. It
calls `this.check()` but then sanitizes the original headers again, and the
missing content-type was not re-inserted into the newly sanitized headers.

This commit modifies the code so that `receiver.check()` does not insert
the content-type, but does allow the validation check to pass if no
content-type header exists. When `receiver.parse()` is called, the headers
are sanitized again and this time used to look up the parser implementation;
at that point the default `application/json` content type is applied if no
content-type header exists.
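
For illustration only (a sketch, not code from this commit): the resulting
behavior looks roughly like the following. The require path mirrors the
tests in this repository, and the header and payload values are invented.

const { HTTPReceiver } = require("../../../index.js"); // consumers would require the published package instead

const receiver = new HTTPReceiver();

// Binary-mode headers carrying the required ce-* attributes,
// but deliberately omitting content-type.
const headers = {
  "ce-specversion": "1.0",
  "ce-id": "1234",
  "ce-type": "com.example.someevent",
  "ce-source": "/example/source"
};
const body = { message: "hello" };

// check() now tolerates the missing content-type, and parse() falls back
// to application/json when selecting a parser, so this no longer throws.
const event = receiver.accept(headers, body);
console.log(event.getData()); // { message: "hello" }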

I've also removed a redundant call to `receiver.check()` in `receiver_binary_1.js`
and simplified the usage of `Constants` in the test.

Signed-off-by: Lance Ball <lball@redhat.com>

* chore: clean up header sniffing

Signed-off-by: Lance Ball <lball@redhat.com>
lance authored May 9, 2020
1 parent c1fda94 commit 72a87df
Showing 3 changed files with 70 additions and 29 deletions.
27 changes: 14 additions & 13 deletions lib/bindings/http/receiver_binary.js
@@ -1,4 +1,5 @@
const Constants = require("./constants.js");
const { HEADER_CONTENT_TYPE, MIME_JSON, DEFAULT_SPEC_VERSION_HEADER } =
require("./constants.js");
const Commons = require("./commons.js");
const CloudEvent = require("../../cloudevent.js");

@@ -52,16 +53,13 @@ BinaryHTTPReceiver.prototype.check = function(payload, headers) {
// Clone and low case all headers names
const sanityHeaders = Commons.sanityAndClone(headers);

// If no content type is provided, default to application/json
if (!sanityHeaders[Constants.HEADER_CONTENT_TYPE]) {
sanityHeaders[Constants.HEADER_CONTENT_TYPE] = Constants.MIME_JSON;
}

// Validation Level 1
if (!this.allowedContentTypes
.includes(sanityHeaders[Constants.HEADER_CONTENT_TYPE])) {
// Validation Level 1 - if content-type exists, be sure it's
// an allowed type
const contentTypeHeader = sanityHeaders[HEADER_CONTENT_TYPE];
const noContentType = !this.allowedContentTypes.includes(contentTypeHeader);
if (contentTypeHeader && noContentType) {
const err = new TypeError("invalid content type");
err.errors = [sanityHeaders[Constants.HEADER_CONTENT_TYPE]];
err.errors = [sanityHeaders[HEADER_CONTENT_TYPE]];
throw err;
}

@@ -71,10 +69,10 @@ BinaryHTTPReceiver.prototype.check = function(payload, headers) {
throw new TypeError(`header '${required}' not found`);
});

if (sanityHeaders[Constants.DEFAULT_SPEC_VERSION_HEADER] !==
if (sanityHeaders[DEFAULT_SPEC_VERSION_HEADER] !==
this.specversion) {
const err = new TypeError("invalid spec version");
err.errors = [sanityHeaders[Constants.DEFAULT_SPEC_VERSION_HEADER]];
err.errors = [sanityHeaders[DEFAULT_SPEC_VERSION_HEADER]];
throw err;
}

@@ -83,14 +81,17 @@ BinaryHTTPReceiver.prototype.check = function(payload, headers) {

function parserFor(parsersByEncoding, cloudevent, headers) {
const encoding = cloudevent.spec.payload.datacontentencoding;
return parsersByEncoding[encoding][headers[Constants.HEADER_CONTENT_TYPE]];
return parsersByEncoding[encoding][headers[HEADER_CONTENT_TYPE]];
}

BinaryHTTPReceiver.prototype.parse = function(payload, headers) {
this.check(payload, headers);

// Clone and low case all headers names
const sanityHeaders = Commons.sanityAndClone(headers);
if (!sanityHeaders[HEADER_CONTENT_TYPE]) {
sanityHeaders[HEADER_CONTENT_TYPE] = MIME_JSON;
}

const processedHeaders = [];
const cloudevent = new CloudEvent(this.Spec);
3 changes: 0 additions & 3 deletions lib/bindings/http/receiver_binary_1.js
@@ -101,9 +101,6 @@ Receiver.prototype.check = function(payload, headers) {
};

Receiver.prototype.parse = function(payload, headers) {
// firstly specific local checks
this.check(payload, headers);

payload = isString(payload) && isBase64(payload)
? Buffer.from(payload, "base64").toString()
: payload;
69 changes: 56 additions & 13 deletions test/bindings/http/promiscuous_receiver_test.js
@@ -1,6 +1,13 @@
const { expect } = require("chai");
const { CloudEvent, HTTPReceiver } = require("../../../index.js");
const constants = require("../../../lib/bindings/http/constants.js");
const {
HEADER_CONTENT_TYPE,
DEFAULT_CONTENT_TYPE,
MIME_CE_JSON,
DEFAULT_SPEC_VERSION_HEADER,
BINARY_HEADERS_03,
BINARY_HEADERS_1
} = require("../../../lib/bindings/http/constants.js");

const receiver = new HTTPReceiver();
const id = "1234";
@@ -24,7 +31,7 @@ describe("HTTP Transport Binding Receiver for CloudEvents", () => {
};

const headers = {
[constants.HEADER_CONTENT_TYPE]: constants.MIME_CE_JSON
[HEADER_CONTENT_TYPE]: MIME_CE_JSON
};

const event = receiver.accept(headers, payload);
@@ -33,11 +40,11 @@ describe("HTTP Transport Binding Receiver for CloudEvents", () => {

it("Binary data returns a CloudEvent", () => {
const headers = {
[constants.HEADER_CONTENT_TYPE]: constants.DEFAULT_CONTENT_TYPE,
[constants.DEFAULT_SPEC_VERSION_HEADER]: specversion,
[constants.BINARY_HEADERS_1.ID]: id,
[constants.BINARY_HEADERS_1.TYPE]: type,
[constants.BINARY_HEADERS_1.SOURCE]: source
[HEADER_CONTENT_TYPE]: DEFAULT_CONTENT_TYPE,
[DEFAULT_SPEC_VERSION_HEADER]: specversion,
[BINARY_HEADERS_1.ID]: id,
[BINARY_HEADERS_1.TYPE]: type,
[BINARY_HEADERS_1.SOURCE]: source
};

const event = receiver.accept(headers, data);
@@ -58,7 +65,7 @@ describe("HTTP Transport Binding Receiver for CloudEvents", () => {
};

const headers = {
[constants.HEADER_CONTENT_TYPE]: constants.MIME_CE_JSON
[HEADER_CONTENT_TYPE]: MIME_CE_JSON
};

const event = receiver.accept(headers, payload);
@@ -67,17 +74,53 @@ describe("HTTP Transport Binding Receiver for CloudEvents", () => {

it("Binary data returns a CloudEvent", () => {
const headers = {
[constants.HEADER_CONTENT_TYPE]: constants.DEFAULT_CONTENT_TYPE,
[constants.DEFAULT_SPEC_VERSION_HEADER]: specversion,
[constants.BINARY_HEADERS_03.ID]: id,
[constants.BINARY_HEADERS_03.TYPE]: type,
[constants.BINARY_HEADERS_03.SOURCE]: source
[HEADER_CONTENT_TYPE]: DEFAULT_CONTENT_TYPE,
[DEFAULT_SPEC_VERSION_HEADER]: specversion,
[BINARY_HEADERS_03.ID]: id,
[BINARY_HEADERS_03.TYPE]: type,
[BINARY_HEADERS_03.SOURCE]: source
};

const event = receiver.accept(headers, data);
validateEvent(event, specversion);
});
});

describe("Kafka-Knative event source", () => {
const specversion = "1.0";
const id = "partition:1/offset:23";
const type = "dev.knative.kafka.event";
const source =
"/apis/v1/namespaces/kafka/kafkasources/kafka-source#knative-demo-topic";

it("Should be parsable", () => {
const headers = {
host: "event-display.kafka.svc.cluster.local",
"user-agent": "Go-http-client/1.1",
"content-length": "59",
"accept-encoding": "gzip",
"ce-id": id,
"ce-source": source,
"ce-specversion": "1.0",
"ce-subject": "partition:1#23",
"ce-time": "2020-05-07T14:16:30.245Z",
"ce-type": type,
forwarded: "for=10.131.0.72;proto=http",
"k-proxy-request": "activator",
"x-envoy-expected-rq-timeout-ms": "600000",
"x-forwarded-for": "10.131.0.72, 10.128.2.99",
"x-forwarded-proto": "http",
"x-request-id": "d3649c1b-a968-40bf-a9da-3e853abc0c8b"
};
const event = receiver.accept(headers, data);
expect(event instanceof CloudEvent).to.equal(true);
expect(event.getId()).to.equal(id);
expect(event.getType()).to.equal(type);
expect(event.getSource()).to.equal(source);
expect(event.getData()).to.deep.equal(data);
expect(event.getSpecversion()).to.equal(specversion);
});
});
});

function validateEvent(event, specversion) {
