|NO.Z.00030|——————————|BigDataEnd|——|Hadoop&kafka.V15|——|kafka.v15|Deserialization Analysis|
1. Deserialization Analysis
### --- Deserialization
~~~     Every message stored on a Kafka broker is a byte array. After the consumer fetches a message,
~~~     it must first deserialize it before handing it to the user program for processing.
### --- The consumer configures two deserializers, one for the key and one for the value:
key.deserializer
value.deserializer
IntegerDeserializer
StringDeserializer
### --- A custom deserializer must implement the org.apache.kafka.common.serialization.Deserializer<T> interface.
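~~~     As an illustration, a minimal sketch of a custom implementation follows. The User type and
~~~     its wire format (a 4-byte big-endian id followed by UTF-8 name bytes) are assumptions made
~~~     for this example only, not anything Kafka prescribes.
package com.example.kafka;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import org.apache.kafka.common.serialization.Deserializer;
// Hypothetical value type, used only in this sketch.
class User {
    final int id;
    final String name;
    User(int id, String name) { this.id = id; this.name = name; }
}
public class UserDeserializer implements Deserializer<User> {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    @Override
    public User deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        // Assumed layout: 4-byte big-endian id, then UTF-8 name bytes.
        ByteBuffer buffer = ByteBuffer.wrap(data);
        int id = buffer.getInt();
        byte[] nameBytes = new byte[buffer.remaining()];
        buffer.get(nameBytes);
        return new User(id, new String(nameBytes, StandardCharsets.UTF_8));
    }
    @Override
    public void close() {
        // nothing to do
    }
}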
~~~     The consumer pulls messages from its subscribed topics:
~~~     consumer.poll(3_000);
### --- In the Fetcher class, the fetched records are deserialized before anything else:
    private ConsumerRecord<K, V> parseRecord(TopicPartition partition,
                                             RecordBatch batch,
                                             Record record) {
        try {
            long offset = record.offset();
            long timestamp = record.timestamp();
            TimestampType timestampType = batch.timestampType();
            Headers headers = new RecordHeaders(record.headers());
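            // Copy the key/value bytes out of the record, then run them through the configured
            // deserializers, passing the topic name and record headers as context.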
            ByteBuffer keyBytes = record.key();
            byte[] keyByteArray = keyBytes == null ? null : Utils.toArray(keyBytes);
            K key = keyBytes == null ? null : this.keyDeserializer.deserialize(partition.topic(), headers, keyByteArray);
            ByteBuffer valueBytes = record.value();
            byte[] valueByteArray = valueBytes == null ? null : Utils.toArray(valueBytes);
            V value = valueBytes == null ? null : this.valueDeserializer.deserialize(partition.topic(), headers, valueByteArray);
            return new ConsumerRecord<>(partition.topic(), partition.partition(), offset,
                                        timestamp, timestampType, record.checksumOrNull(),
                                        keyByteArray == null ? ConsumerRecord.NULL_SIZE : keyByteArray.length,
                                        valueByteArray == null ? ConsumerRecord.NULL_SIZE : valueByteArray.length,
                                        key, value, headers);
        } catch (RuntimeException e) {
            throw new SerializationException("Error deserializing key/value for partition " + partition +
                    " at offset " + record.offset() + ". If needed, please seek past the record to continue consumption.", e);
        }
    }
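~~~     To tie the pieces together, here is a minimal sketch of a consumer that wires in the two
~~~     deserializer configs and polls. The broker address, group id, and topic name are
~~~     placeholders; poll(Duration) is used here, but the older poll(long) overload shown above
~~~     behaves the same for this purpose.
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
public class DeserializerDemo {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // placeholder broker
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "demo_group");              // placeholder group
        // key.deserializer / value.deserializer, as discussed above
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        try (KafkaConsumer<Integer, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singleton("demo_topic"));          // placeholder topic
            // Every record returned by poll() has already passed through
            // Fetcher#parseRecord, i.e. its key and value are deserialized.
            ConsumerRecords<Integer, String> records = consumer.poll(Duration.ofSeconds(3));
            for (ConsumerRecord<Integer, String> record : records)
                System.out.println(record.key() + " -> " + record.value());
        }
    }
}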
2. Kafka's Default Deserializer Implementations
### --- Kafka provides several deserializer implementations by default:
~~~     The org.apache.kafka.common.serialization package contains these implementations:
### --- ByteArrayDeserializer
package org.apache.kafka.common.serialization;
import java.util.Map;
public class ByteArrayDeserializer implements Deserializer<byte[]> {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    @Override
    public byte[] deserialize(String topic, byte[] data) {
        return data;
    }
    @Override
    public void close() {
        // nothing to do
    }
}
### --- ByteBufferDeserializer
package org.apache.kafka.common.serialization;
import java.nio.ByteBuffer;
import java.util.Map;
public class ByteBufferDeserializer implements Deserializer<ByteBuffer> {
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    public ByteBuffer deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        return ByteBuffer.wrap(data);
    }
    public void close() {
        // nothing to do
    }
}
### --- BytesDeserializer
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.utils.Bytes;
import java.util.Map;
public class BytesDeserializer implements Deserializer<Bytes> {
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    public Bytes deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        return new Bytes(data);
    }
    public void close() {
        // nothing to do
    }
}
### --- DoubleDeserializer
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.util.Map;
public class DoubleDeserializer implements Deserializer<Double> {
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    @Override
    public Double deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        if (data.length != 8) {
            throw new SerializationException("Size of data received by Deserializer is not 8");
        }
        long value = 0;
        for (byte b : data) {
            value <<= 8;
            value |= b & 0xFF;
        }
        return Double.longBitsToDouble(value);
    }
    @Override
    public void close() {
        // nothing to do
    }
}
### --- FloatDeserializer
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.util.Map;
public class FloatDeserializer implements Deserializer<Float> {
    @Override
    public void configure(final Map<String, ?> configs, final boolean isKey) {
        // nothing to do
    }
    @Override
    public Float deserialize(final String topic, final byte[] data) {
        if (data == null)
            return null;
        if (data.length != 4) {
            throw new SerializationException("Size of data received by Deserializer is not 4");
        }
        int value = 0;
        for (byte b : data) {
            value <<= 8;
            value |= b & 0xFF;
        }
        return Float.intBitsToFloat(value);
    }
    @Override
    public void close() {
        // nothing to do
    }
}
### --- IntegerDeserializer
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.util.Map;
public class IntegerDeserializer implements Deserializer<Integer> {
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    public Integer deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        if (data.length != 4) {
            throw new SerializationException("Size of data received by IntegerDeserializer is not 4");
        }
        int value = 0;
        for (byte b : data) {
            value <<= 8;
            value |= b & 0xFF;
        }
        return value;
    }
    public void close() {
        // nothing to do
    }
}
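~~~     Integer, Long, Short, Double, and Float all rebuild the number from big-endian bytes with
~~~     the same shift-and-mask loop. A small standalone check of that loop (the byte values are
~~~     illustrative; 0x2A is 42 encoded the way IntegerSerializer would produce it):
import org.apache.kafka.common.serialization.IntegerDeserializer;
public class BigEndianDemo {
    public static void main(String[] args) {
        byte[] data = {0x00, 0x00, 0x00, 0x2A};
        int value = 0;
        for (byte b : data) {
            value <<= 8;       // shift previously read bytes up one position
            value |= b & 0xFF; // mask to prevent sign extension of negative bytes
        }
        System.out.println(value);                                            // 42
        System.out.println(new IntegerDeserializer().deserialize("t", data)); // 42
    }
}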
### --- LongDeserializer
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.util.Map;
public class LongDeserializer implements Deserializer<Long> {
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    public Long deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        if (data.length != 8) {
            throw new SerializationException("Size of data received by LongDeserializer is not 8");
        }
        long value = 0;
        for (byte b : data) {
            value <<= 8;
            value |= b & 0xFF;
        }
        return value;
    }
    public void close() {
        // nothing to do
    }
}
### --- ShortDeserializer
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.util.Map;
public class ShortDeserializer implements Deserializer<Short> {
    public void configure(Map<String, ?> configs, boolean isKey) {
        // nothing to do
    }
    public Short deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        if (data.length != 2) {
            throw new SerializationException("Size of data received by ShortDeserializer is not 2");
        }
        short value = 0;
        for (byte b : data) {
            value <<= 8;
            value |= b & 0xFF;
        }
        return value;
    }
    public void close() {
        // nothing to do
    }
}
### --- StringDeserializer
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.util.Map;
/**
 *  String encoding defaults to UTF8 and can be customized by setting the property key.deserializer.encoding,
 *  value.deserializer.encoding or deserializer.encoding. The first two take precedence over the last.
 */
public class StringDeserializer implements Deserializer<String> {
    private String encoding = "UTF8";
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        String propertyName = isKey ? "key.deserializer.encoding" : "value.deserializer.encoding";
        Object encodingValue = configs.get(propertyName);
        if (encodingValue == null)
            encodingValue = configs.get("deserializer.encoding");
        if (encodingValue != null && encodingValue instanceof String)
            encoding = (String) encodingValue;
    }
    @Override
    public String deserialize(String topic, byte[] data) {
        try {
            if (data == null)
                return null;
            else
                return new String(data, encoding);
        } catch (UnsupportedEncodingException e) {
            throw new SerializationException("Error when deserializing byte[] to string due to unsupported encoding " + encoding);
        }
    }
    @Override
    public void close() {
        // nothing to do
    }
}
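~~~     Per the javadoc above, the character encoding can be overridden through the consumer
~~~     configs. A minimal sketch exercising the deserializer directly; the UTF-16 choice and the
~~~     sample string are arbitrary:
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.common.serialization.StringDeserializer;
public class EncodingConfigDemo {
    public static void main(String[] args) {
        Map<String, Object> configs = new HashMap<>();
        // The value-specific key takes precedence over the generic "deserializer.encoding".
        configs.put("value.deserializer.encoding", "UTF-16");
        StringDeserializer deserializer = new StringDeserializer();
        deserializer.configure(configs, false); // false = configuring the value deserializer
        byte[] utf16Bytes = "反序列化".getBytes(Charset.forName("UTF-16"));
        System.out.println(deserializer.deserialize("t", utf16Bytes)); // prints 反序列化
    }
}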