in scio-google-cloud-platform/src/main/scala/com/spotify/scio/bigquery/StorageUtil.scala [66:118]
/**
 * Sets the BigQuery standard-SQL type string on `tableField` derived from the
 * Avro `schema`'s type and logical type, recursing into nested RECORDs.
 *
 * Mutates `tableField` in place (calls `setType`, and `setFields` for RECORDs).
 *
 * @throws IllegalStateException for any Avro type / logical-type combination
 *                               that has no BigQuery mapping
 */
private def setRawType(tableField: TableFieldSchema, schema: Schema): Unit = {
  val tpe = schema.getType match {
    case Type.BOOLEAN => "BOOLEAN"
    case Type.LONG =>
      schema.getLogicalType match {
        case null => "INT64"
        case t if t.getName == "timestamp-micros" => "TIMESTAMP"
        case t if t.getName == "time-micros" => "TIME"
        case t =>
          throw new IllegalStateException(s"Unsupported logical type: $t")
      }
    case Type.DOUBLE => "FLOAT64"
    case Type.BYTES =>
      schema.getLogicalType match {
        case null => "BYTES"
        case t if t.getName == "decimal" =>
          val precision = schema.getObjectProp("precision").asInstanceOf[Int]
          val scale = schema.getObjectProp("scale").asInstanceOf[Int]
          // Only the fixed NUMERIC/BIGNUMERIC parameterizations are supported.
          (precision, scale) match {
            case (38, 9) => "NUMERIC"
            case (77, 38) => "BIGNUMERIC"
            case _ =>
              throw new IllegalStateException(
                s"Unsupported decimal precision and scale: ($precision, $scale)"
              )
          }
        case t =>
          throw new IllegalStateException(s"Unsupported logical type: $t")
      }
    case Type.INT =>
      schema.getLogicalType match {
        // Null guard: getLogicalType may return null, and the guard below
        // dereferences it; without the guard this branch would NPE.
        case t if t != null && t.getName == "date" => "DATE"
        case t =>
          // BUG FIX: previously this branch *returned* the error message as
          // the type string instead of throwing, silently producing an
          // invalid TableFieldSchema type. Now fails fast like the other
          // branches.
          throw new IllegalStateException(s"Unsupported logical type: $t")
      }
    case Type.STRING =>
      // FIXME: schema.getLogicalType == null in this case, BigQuery service side bug?
      // Fall back to raw string props set by the BigQuery Storage API.
      val logicalType = schema.getProp("logicalType")
      val sqlType = schema.getProp("sqlType")
      (logicalType, sqlType) match {
        case ("datetime", _) => "DATETIME"
        case (_, "GEOGRAPHY") => "GEOGRAPHY"
        case (_, "JSON") => "JSON"
        case _ => "STRING"
      }
    case Type.RECORD =>
      // Recurse into the nested record's fields before tagging the type.
      tableField.setFields(getFieldSchemas(schema).asJava)
      "RECORD"
    case t =>
      throw new IllegalStateException(s"Unsupported type: $t")
  }
  tableField.setType(tpe)
  ()
}