in parquet/src/main/scala/magnolify/parquet/ParquetType.scala [173:199]
// No-arg constructor, presumably required so the framework can instantiate this
// WriteSupport reflectively — TODO confirm against the enclosing class. The null
// ParquetType is restored later: `init` deserializes it from the Configuration.
def this() = this(null)
// Object-model name reported for this WriteSupport. NOTE(review): when
// avroCompat is enabled, `init` puts OBJECT_MODEL_NAME_PROP = "avro" into the
// file metadata, which overrides this value (see the comment in `init`).
override def getName: String = "magnolify"
// Mutable because the consumer is supplied by the write lifecycle after
// construction; assumed to be set in a `prepareForWrite` override outside
// this view — TODO confirm.
private var recordConsumer: RecordConsumer = null
/**
 * Builds the WriteContext (message schema + file metadata) for a write.
 *
 * The `ParquetType` may be absent when this instance was created via the
 * no-arg constructor; in that case it is deserialized from the Hadoop
 * configuration on first use.
 */
override def init(configuration: Configuration): hadoop.WriteSupport.WriteContext = {
  // Lazily restore the serialized ParquetType placed in the config under WriteTypeKey.
  if (parquetType == null) {
    parquetType = SerializationUtils.fromBase64[ParquetType[T]](configuration.get(WriteTypeKey))
  }

  val fileMetadata = new java.util.HashMap[String, String]()
  if (parquetType.avroCompat) {
    // This overrides `WriteSupport#getName`
    fileMetadata.put(ParquetWriter.OBJECT_MODEL_NAME_PROP, "avro")
    // Embed the Avro schema so Avro-based readers can interpret the file.
    fileMetadata.put(AVRO_SCHEMA_METADATA_KEY, parquetType.avroSchema.toString())
  } else {
    logger.warn(
      "Parquet file is being written with no avro compatibility, this mode is not " +
        "producing schema. Add `import magnolify.parquet.ParquetArray.AvroCompat._` to " +
        "generate schema"
    )
  }

  new hadoop.WriteSupport.WriteContext(Schema.message(parquetType.schema), fileMetadata)
}