package org.xbib.elasticsearch.transport.netty;

import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.monitor.jvm.JvmInfo;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.handler.codec.frame.FrameDecoder;
import org.jboss.netty.handler.codec.frame.TooLongFrameException;

import java.io.StreamCorruptedException;

/**
 * A {@link FrameDecoder} that reads a four-byte length header and waits until
 * the complete frame has arrived before passing the buffer downstream.
 */
public class SizeHeaderFrameDecoder extends FrameDecoder {

    // upper bound for a single frame: 90% of the maximum heap size
    private static final long NINETY_PER_HEAP_SIZE = (long) (JvmInfo.jvmInfo().mem().heapMax().bytes() * 0.9);

    @Override
    protected Object decode(ChannelHandlerContext ctx, Channel channel, ChannelBuffer buffer) throws Exception {
        // wait until the four-byte length header is available
        if (buffer.readableBytes() < 4) {
            return null;
        }

        int dataLen = buffer.getInt(buffer.readerIndex());
        if (dataLen <= 0) {
            throw new StreamCorruptedException("invalid data length: " + dataLen);
        }
        // safety against too large frames being sent
        if (dataLen > NINETY_PER_HEAP_SIZE) {
            throw new TooLongFrameException(
                    "transport content length received [" + new ByteSizeValue(dataLen) + "] exceeded [" + new ByteSizeValue(NINETY_PER_HEAP_SIZE) + "]");
        }

        // wait until the whole frame (header plus payload) has arrived
        if (buffer.readableBytes() < dataLen + 4) {
            return null;
        }
        // consume the length header and hand the buffer to the next handler,
        // which reads the dataLen bytes of payload from it
        buffer.skipBytes(4);
        return buffer;
    }
}