def predict()

in basic_pitch/inference.py


    def predict(self, x: npt.NDArray[np.float32]) -> Dict[str, npt.NDArray[np.float32]]:
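        """Run inference with whichever backend was loaded and return the output activations keyed by name."""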
        if self.model_type == Model.MODEL_TYPES.TENSORFLOW:
            return {k: v.numpy() for k, v in cast(tf.keras.Model, self.model)(x).items()}
        elif self.model_type == Model.MODEL_TYPES.COREML:
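            # Debug output: sanity-check the Core ML input tensor before prediction.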
            print(f"isfinite: {np.all(np.isfinite(x))}", flush=True)
            print(f"shape: {x.shape}", flush=True)
            print(f"dtype: {x.dtype}", flush=True)
            result = cast(ct.models.MLModel, self.model).predict({"input_2": x})
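            # Remap the Core ML output names (Identity*) back to note/onset/contour.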
            return {
                "note": result["Identity_1"],
                "onset": result["Identity_2"],
                "contour": result["Identity"],
            }
        elif self.model_type == Model.MODEL_TYPES.TFLITE:
            return self.model(input_2=x)  # type: ignore
        elif self.model_type == Model.MODEL_TYPES.ONNX:
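            # Pair the exported graph's output tensors with note/onset/contour in order.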
            return {
                k: v
                for k, v in zip(
                    ["note", "onset", "contour"],
                    cast(ort.InferenceSession, self.model).run(
                        [
                            "StatefulPartitionedCall:1",
                            "StatefulPartitionedCall:2",
                            "StatefulPartitionedCall:0",
                        ],
                        {"serving_default_input_2:0": x},
                    ),
                )
            }
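
For orientation, a minimal usage sketch follows. It assumes the public basic_pitch package layout (Model, ICASSP_2022_MODEL_PATH, and AUDIO_N_SAMPLES importable as shown) and a single dummy audio window shaped (batch, samples, 1); those import paths and the input shape are assumptions about the surrounding library, not something stated in the code above.

import numpy as np

from basic_pitch import ICASSP_2022_MODEL_PATH
from basic_pitch.constants import AUDIO_N_SAMPLES
from basic_pitch.inference import Model

# Assumed entry point: the Model wrapper picks a backend from the serialized
# model it is handed, so predict() dispatches through the branches shown above.
model = Model(ICASSP_2022_MODEL_PATH)

# One silent audio window; shape (batch, samples, 1) is an assumption about the
# bundled models' expected input.
window = np.zeros((1, AUDIO_N_SAMPLES, 1), dtype=np.float32)

outputs = model.predict(window)
for name, activation in outputs.items():  # expected keys: "note", "onset", "contour"
    print(name, activation.shape)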