Skip to content

Commit

Permalink
Use str(Schema) rather than Schema.to_json to prevent fastavro from raising exception `TypeError: unhashable type: 'mappingproxy'`
Browse files Browse the repository at this point in the history

Fixes confluentinc#1156
  • Loading branch information
ffissore committed Dec 15, 2021
1 parent fe35fa9 commit 0c02ff5
Showing 1 changed file with 4 additions and 3 deletions.
7 changes: 4 additions & 3 deletions src/confluent_kafka/avro/serializer/message_serializer.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
# derived from https://github.com/verisign/python-confluent-schemaregistry.git
#
import io
import json
import logging
import struct
import sys
Expand Down Expand Up @@ -79,7 +80,7 @@ def __init__(self, registry_client, reader_key_schema=None, reader_value_schema=
# Encoder support
def _get_encoder_func(self, writer_schema):
if HAS_FAST:
schema = writer_schema.to_json()
schema = json.loads(str(writer_schema))
parsed_schema = parse_schema(schema)
return lambda record, fp: schemaless_writer(fp, parsed_schema, record)
writer = avro.io.DatumWriter(writer_schema)
Expand Down Expand Up @@ -175,8 +176,8 @@ def _get_decoder_func(self, schema_id, payload, is_key=False):
if HAS_FAST:
# try to use fast avro
try:
fast_avro_writer_schema = parse_schema(writer_schema_obj.to_json())
fast_avro_reader_schema = parse_schema(reader_schema_obj.to_json())
fast_avro_writer_schema = parse_schema(json.loads(str(writer_schema_obj)))
fast_avro_reader_schema = parse_schema(json.loads(str(reader_schema_obj)))
schemaless_reader(payload, fast_avro_writer_schema)

# If we reach this point, this means we have fastavro and it can
Expand Down

0 comments on commit 0c02ff5

Please sign in to comment.