-
Notifications
You must be signed in to change notification settings - Fork 73
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Basic support for Confluent's KafkaAvroSerializer / KafkaAvroDeSerializer #9
Merged
mostafa
merged 10 commits into
mostafa:master
from
fmck3516:support-for-confluent-de-serializer
Jun 29, 2021
Merged
Changes from all commits
Commits
Show all changes
10 commits
Select commit
Hold shift + click to select a range
2ec6982
Experimental support for Confluent Cloud
5e3c84e
initial work to support Confluents KafkaAvroSerializer/KafkaAvroDeser…
404d1a4
added schemaRegistry.go to encapsulate logic that deals with Confluen…
6aeeacb
added caching for previous schema lookups
c999281
reduce diff noise
99e965a
Merge branch 'mostafa:master' into support-for-confluent-de-serializer
fmck3516 77effa2
Merge branch 'master' of https://github.com/mostafa/xk6-kafka into su…
5f4c36f
Merge branch 'master' of https://github.com/mostafa/xk6-kafka into su…
5cf791a
PULL-9: review follow-ups
9c687eb
Merge branch 'support-for-confluent-de-serializer' of https://github.…
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -14,3 +14,4 @@ | |
# Dependency directories (remove the comment below to include it) | ||
vendor/ | ||
k6 | ||
.idea |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,81 @@ | ||
package kafka | ||
|
||
import ( | ||
"encoding/json" | ||
"errors" | ||
) | ||
|
||
// ConsumerConfiguration carries the deserializer class names used when
// reading messages; the values mirror Confluent's Java consumer property
// values (e.g. "io.confluent.kafka.serializers.KafkaAvroDeserializer").
type ConsumerConfiguration struct {
	KeyDeserializer   string `json:"keyDeserializer"`
	ValueDeserializer string `json:"valueDeserializer"`
}

// ProducerConfiguration carries the serializer class names used when
// writing messages (see useKafkaAvroSerializer).
type ProducerConfiguration struct {
	KeySerializer   string `json:"keySerializer"`
	ValueSerializer string `json:"valueSerializer"`
}

// BasicAuth configures authentication against the schema registry.
// When CredentialsSource is "USER_INFO", UserInfo is expected to hold
// "username:password" (see useBasicAuthWithCredentialSourceUserInfo).
type BasicAuth struct {
	CredentialsSource string `json:"credentialsSource"`
	UserInfo          string `json:"userInfo"`
}

// SchemaRegistryConfiguration points at a Confluent schema registry
// instance and its credentials.
type SchemaRegistryConfiguration struct {
	Url       string    `json:"url"`
	BasicAuth BasicAuth `json:"basicAuth"`
}

// Configuration is the top-level structure unmarshalled from the JSON
// configuration string passed in from the k6 script.
type Configuration struct {
	Consumer       ConsumerConfiguration       `json:"consumer"`
	Producer       ProducerConfiguration       `json:"producer"`
	SchemaRegistry SchemaRegistryConfiguration `json:"schemaRegistry"`
}
|
||
func unmarshalConfiguration(jsonConfiguration string) (Configuration, error) { | ||
var configuration Configuration | ||
err := json.Unmarshal([]byte(jsonConfiguration), &configuration) | ||
return configuration, err | ||
} | ||
|
||
func useKafkaAvroDeserializer(configuration Configuration, keyOrValue string) bool { | ||
if (Configuration{}) == configuration || | ||
(ConsumerConfiguration{}) == configuration.Consumer { | ||
return false | ||
} | ||
if keyOrValue == "key" && configuration.Consumer.KeyDeserializer == "io.confluent.kafka.serializers.KafkaAvroDeserializer" || | ||
keyOrValue == "value" && configuration.Consumer.ValueDeserializer == "io.confluent.kafka.serializers.KafkaAvroDeserializer" { | ||
return true | ||
} | ||
return false | ||
} | ||
|
||
func useKafkaAvroSerializer(configuration Configuration, keyOrValue string) bool { | ||
if (Configuration{}) == configuration || | ||
(ProducerConfiguration{}) == configuration.Producer { | ||
return false | ||
} | ||
if keyOrValue == "key" && configuration.Producer.KeySerializer == "io.confluent.kafka.serializers.KafkaAvroSerializer" || | ||
keyOrValue == "value" && configuration.Producer.ValueSerializer == "io.confluent.kafka.serializers.KafkaAvroSerializer" { | ||
return true | ||
} | ||
return false | ||
} | ||
|
||
func useBasicAuthWithCredentialSourceUserInfo(configuration Configuration) bool { | ||
if (Configuration{}) == configuration || | ||
(SchemaRegistryConfiguration{}) == configuration.SchemaRegistry || | ||
(BasicAuth{}) == configuration.SchemaRegistry.BasicAuth { | ||
return false | ||
} | ||
return configuration.SchemaRegistry.BasicAuth.CredentialsSource == "USER_INFO" | ||
} | ||
|
||
func validateConfiguration(configuration Configuration) error { | ||
if useKafkaAvroSerializer(configuration, "key") || useKafkaAvroSerializer(configuration, "value") { | ||
if (SchemaRegistryConfiguration{}) == configuration.SchemaRegistry { | ||
return errors.New("you must provide a value for the \"SchemaRegistry\" configuration property to use a serializer " + | ||
"of type \"io.confluent.kafka.serializers.KafkaAvroSerializer\"") | ||
} | ||
} | ||
return nil | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,91 @@ | ||
package kafka | ||
|
||
import (
	"bytes"
	"encoding/binary"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"

	"github.com/linkedin/goavro/v2"
)
|
||
// i32tob encodes val as 4 bytes in big-endian (network) order — the byte
// layout Confluent's wire format uses for the schema id. Uses the standard
// library encoder instead of a hand-rolled shift loop.
func i32tob(val uint32) []byte {
	r := make([]byte, 4)
	binary.BigEndian.PutUint32(r, val)
	return r
}
|
||
// Account for proprietary 5-byte prefix before the Avro payload: | ||
// https://docs.confluent.io/platform/current/schema-registry/serdes-develop/index.html#wire-format | ||
func removeMagicByteAndSchemaIdPrefix(configuration Configuration, messageData []byte, keyOrValue string) []byte { | ||
if useKafkaAvroDeserializer(configuration, keyOrValue) { | ||
return messageData[5:] | ||
} | ||
return messageData | ||
} | ||
|
||
// Add proprietary 5-byte prefix before the Avro payload: | ||
// https://docs.confluent.io/platform/current/schema-registry/serdes-develop/index.html#wire-format | ||
func addMagicByteAndSchemaIdPrefix(configuration Configuration, avroData []byte, topic string, keyOrValue string, schema string) ([]byte, error) { | ||
var schemaId, err = getSchemaId(configuration, topic, keyOrValue, schema) | ||
if err != nil { | ||
ReportError(err, "Retrieval of schema id failed.") | ||
return nil, err | ||
} | ||
if schemaId != 0 { | ||
return append(append([]byte{ 0 }, i32tob(schemaId)...), avroData...), nil | ||
} | ||
return avroData, nil | ||
} | ||
|
||
var schemaIdCache = make(map[string]uint32) | ||
|
||
func getSchemaId(configuration Configuration, topic string, keyOrValue string, schema string) (uint32, error) { | ||
if schemaIdCache[schema] > 0 { | ||
return schemaIdCache[schema], nil | ||
} | ||
if useKafkaAvroSerializer(configuration, keyOrValue) { | ||
url := configuration.SchemaRegistry.Url + "/subjects/" + topic + "-" + keyOrValue + "/versions" | ||
codec, _ := goavro.NewCodec(schema); | ||
|
||
body := "{\"schema\":\"" + strings.Replace(codec.CanonicalSchema(), "\"", "\\\"", -1) + "\"}" | ||
|
||
client := &http.Client{} | ||
req, err := http.NewRequest("POST", url, bytes.NewReader([]byte(body))) | ||
if err != nil { | ||
return 0, err | ||
} | ||
req.Header.Add("Content-Type", "application/vnd.schemaregistry.v1+json") | ||
if useBasicAuthWithCredentialSourceUserInfo(configuration) { | ||
username := strings.Split(configuration.SchemaRegistry.BasicAuth.UserInfo, ":")[0] | ||
password := strings.Split(configuration.SchemaRegistry.BasicAuth.UserInfo, ":")[1] | ||
req.SetBasicAuth(username, password) | ||
} | ||
resp, err := client.Do(req); | ||
if err != nil { | ||
return 0, err | ||
} | ||
if resp.StatusCode >= 400 { | ||
return 0, errors.New(fmt.Sprintf("Retrieval of schema ids failed. Details: Url= %v, body=%v, response=%v", url, body, resp)) | ||
} | ||
defer resp.Body.Close() | ||
bodyBytes, err := ioutil.ReadAll(resp.Body) | ||
if err != nil { | ||
return 0, err | ||
} | ||
|
||
var result map[string]int32 | ||
err = json.Unmarshal(bodyBytes, &result) | ||
if err != nil { | ||
return 0, err; | ||
} | ||
schemaId := uint32(result["id"]) | ||
schemaIdCache[schema] = schemaId | ||
return schemaId, nil | ||
} | ||
return 0, nil | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,102 @@ | ||
/* | ||
This is a k6 test script that imports the xk6-kafka and | ||
tests Kafka with a 100 Avro messages per iteration. | ||
*/ | ||
|
||
import { | ||
check | ||
} from 'k6'; | ||
import { | ||
writer, | ||
reader, | ||
consumeWithConfiguration, | ||
produceWithConfiguration | ||
} from 'k6/x/kafka'; // import kafka extension | ||
|
||
// Confluent Cloud bootstrap broker(s) and the topic under test —
// placeholders; replace with real values before running.
const bootstrapServers = ["subdomain.us-east-1.aws.confluent.cloud:9092"];
const topic = "com.example.person";

// SASL/PLAIN credentials for the Kafka brokers (placeholders).
const auth = JSON.stringify({
    username: "username",
    password: "password",
    algorithm: "plain"
})

// One writer and one reader, both authenticated against the same topic.
const producer = writer(bootstrapServers, topic, auth);
const consumer = reader(bootstrapServers, topic, null, null, auth);
|
||
// Avro record schema for the message key: a single "ssn" string field.
const keySchema = `{
  "name": "KeySchema",
  "type": "record",
  "namespace": "com.example",
  "fields": [
    {
      "name": "ssn",
      "type": "string"
    }
  ]
}
`
// Avro record schema for the message value: first and last name strings.
const valueSchema = `{
  "name": "ValueSchema",
  "type": "record",
  "namespace": "com.example",
  "fields": [
    {
      "name": "firstname",
      "type": "string"
    },
    {
      "name": "lastname",
      "type": "string"
    }
  ]
}`
|
||
|
||
// Serializer/deserializer and schema-registry settings passed to
// produceWithConfiguration/consumeWithConfiguration. userInfo is
// "KEY:SECRET" — a placeholder for real registry API credentials.
var configuration = JSON.stringify({
    consumer: {
        keyDeserializer: "io.confluent.kafka.serializers.KafkaAvroDeserializer",
        valueDeserializer: "io.confluent.kafka.serializers.KafkaAvroDeserializer",
    },
    producer: {
        keySerializer: "io.confluent.kafka.serializers.KafkaAvroSerializer",
        valueSerializer: "io.confluent.kafka.serializers.KafkaAvroSerializer",
    },
    schemaRegistry: {
        url: "https://subdomain.us-east-2.aws.confluent.cloud",
        basicAuth: {
            credentialsSource: "USER_INFO",
            userInfo: "KEY:SECRET"
        },
    },
})
|
||
// Per-iteration body: produce 100 Avro-encoded messages one at a time,
// then consume 20 of them back, checking both directions.
export default function () {
    for (let i = 0; i < 100; i++) {
        const batch = [{
            key: JSON.stringify({ "ssn": "ssn-" + i }),
            value: JSON.stringify({
                "firstname": "firstname-" + i,
                "lastname": "lastname-" + i,
            }),
        }];
        const error = produceWithConfiguration(producer, batch, configuration, keySchema, valueSchema);
        check(error, {
            "is sent": err => err == undefined
        });
    }

    const received = consumeWithConfiguration(consumer, 20, configuration, keySchema, valueSchema);
    check(received, {
        "20 message returned": msgs => msgs.length == 20
    })
}
|
||
// teardown runs once after the test finishes: release both Kafka connections.
export function teardown(data) {
    producer.close();
    consumer.close();
}
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I love this. 👏