in parquet/src/main/scala/magnolify/parquet/ParquetType.scala [126:157]
// No-arg constructor leaving parquetType null; presumably required for
// reflective instantiation by the Hadoop read path — the actual ParquetType
// is deserialized from the job configuration on the first init() call below.
def this() = this(null)
/**
 * Prepares the read by resolving the `ParquetType`, warning on Avro-compat
 * mismatches, and returning the projected read schema.
 */
override def init(context: hadoop.InitContext): hadoop.ReadSupport.ReadContext = {
  // Lazily deserialize the ParquetType when this instance was created via the
  // no-arg constructor.
  if (parquetType == null) {
    // getConfiguration is deprecated, but the recommended
    // getParquetConfiguration is only available for parquet 1.14+.
    val serialized = context.getConfiguration.get(ReadTypeKey): @nowarn("cat=deprecation")
    parquetType = SerializationUtils.fromBase64[ParquetType[T]](serialized)
  }

  // Detect whether the file was written from Avro records, either via the
  // object-model property or the presence of an Avro schema metadata key.
  val fileMetadata = context.getKeyValueMetadata
  val objectModel = fileMetadata.get(ParquetWriter.OBJECT_MODEL_NAME_PROP)
  val writtenByAvro =
    (objectModel != null && objectModel.contains("avro")) ||
      fileMetadata.containsKey(AVRO_SCHEMA_METADATA_KEY) ||
      fileMetadata.containsKey(OLD_AVRO_SCHEMA_METADATA_KEY)

  // Warn when the reader's AvroCompat setting disagrees with how the file was
  // written (the two conditions are mutually exclusive, so else-if is safe).
  if (writtenByAvro && !parquetType.avroCompat) {
    logger.warn(
      "Parquet file was written from Avro records, " +
        "`import magnolify.parquet.ParquetArray.AvroCompat._` to read correctly"
    )
  } else if (!writtenByAvro && parquetType.avroCompat) {
    logger.warn(
      "Parquet file was not written from Avro records, " +
        "remove `import magnolify.parquet.ParquetArray.AvroCompat._` to read correctly"
    )
  }

  // Project the requested schema and verify it against the file's schema
  // before handing it to the reader.
  val projection = Schema.message(parquetType.schema)
  Schema.checkCompatibility(context.getFileSchema, projection)
  new hadoop.ReadSupport.ReadContext(projection, java.util.Collections.emptyMap())
}