protected int getCompressedData()

in src/main/java/com/hadoop/compression/lzo/LzopInputStream.java [283:329]
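
Reads the header and payload of the next lzop block from the underlying
stream: the 4-byte compressed length, the stored per-block checksums, and
finally the compressed bytes themselves, which are fed to the decompressor.
Returns the compressed length of the block. A standalone sketch of the block
layout follows the listing.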


  protected int getCompressedData() throws IOException {
    checkStream();
    // Verify the checksums recorded for the previous block before we start
    // reading a new one.
    verifyChecksums();

    // Read the 4-byte, big-endian size of the next compressed chunk.
    int compressedLen = readInt(in, buf, 4);
    noCompressedBytes += 4;

    // A negative or oversized length indicates a corrupt stream.
    if (compressedLen < 0 || compressedLen > LzoCodec.MAX_BLOCK_SIZE) {
      throw new IOException("Invalid compressed length " + compressedLen +
        " (max block size is " + LzoCodec.MAX_BLOCK_SIZE +
        "; probably corrupt file)");
    }

    LzopDecompressor ldecompressor = (LzopDecompressor)decompressor;
    // If compressing a block would make it larger, lzop writes the block
    // uncompressed instead. In that case the compressed length in the header
    // equals the uncompressed length, and NO compressed-data checksum is
    // written for the block. The >= comparison below is defensive; a
    // well-formed stream never writes a compressed length above the
    // uncompressed one.
    ldecompressor.setCurrentBlockUncompressed(compressedLen >= uncompressedBlockSize);

    // Record the stored checksum of the uncompressed data for each enabled
    // uncompressed-data check (e.g. Adler-32, CRC-32); verifyChecksums()
    // compares them against computed values once the block is decompressed.
    for (DChecksum chk : dcheck.keySet()) {
      dcheck.put(chk, readInt(in, buf, 4));
      noCompressedBytes += 4;
    }

    // Stored (uncompressed) blocks carry no compressed-data checksums.
    if (!ldecompressor.isCurrentBlockUncompressed()) {
      for (CChecksum chk : ccheck.keySet()) {
        ccheck.put(chk, readInt(in, buf, 4));
        noCompressedBytes += 4;
      }
    }

    // Reset the running checksum state before the new block's data is consumed.
    ldecompressor.resetChecksum();

    // Read compressedLen bytes of payload from the underlying stream, growing
    // the reusable buffer if this chunk is larger than any seen so far.
    if (compressedLen > buffer.length) {
      buffer = new byte[compressedLen];
    }
    readFully(in, buffer, 0, compressedLen);
    noCompressedBytes += compressedLen;

    // Send the read data to the decompressor.
    ldecompressor.setInput(buffer, 0, compressedLen);

    return compressedLen;
  }
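
For reference, here is a minimal standalone sketch of the on-stream block
layout the method above consumes. The LzopBlockSketch class and its readBlock
helper are hypothetical illustrations, not part of LzopInputStream; the sketch
assumes numDChecks and numCChecks mirror the checksum flags recorded in the
lzop file header (the role dcheck and ccheck play above), and it ignores the
end-of-stream marker (a block whose uncompressed length is zero), which the
real reader handles separately.

  import java.io.DataInputStream;
  import java.io.IOException;

  // Hypothetical sketch -- not part of the real LzopInputStream API.
  final class LzopBlockSketch {

    // One lzop block on the stream (all integers are 4 bytes, big-endian):
    //   uncompressed length   (zero marks end of stream)
    //   compressed length
    //   one checksum per enabled uncompressed-data check
    //   one checksum per enabled compressed-data check, present only
    //     when the block was genuinely compressed
    //   compressed payload    (compressedLen bytes)
    static byte[] readBlock(DataInputStream in, int numDChecks, int numCChecks)
        throws IOException {
      int uncompressedLen = in.readInt();  // DataInputStream reads big-endian
      int compressedLen = in.readInt();

      // A block lzop stored uncompressed has equal lengths and carries no
      // compressed-data checksums.
      boolean storedUncompressed = compressedLen >= uncompressedLen;

      for (int i = 0; i < numDChecks; i++) {
        in.readInt();                      // stored checksum of uncompressed data
      }
      if (!storedUncompressed) {
        for (int i = 0; i < numCChecks; i++) {
          in.readInt();                    // stored checksum of compressed data
        }
      }

      byte[] payload = new byte[compressedLen];
      in.readFully(payload);
      return payload;
    }
  }

Unlike the real method, the sketch discards the stored checksums instead of
recording them in dcheck and ccheck for later verification.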