private def collapseTokens()

in src/main/scala/com/twitter/penguin/korean/tokenizer/KoreanDetokenizer.scala [43:56]

Folds over the token sequence and glues affix tokens onto their neighbors: a token whose POS is in SuffixPos, or one that immediately follows a PrefixPos token, is appended to the previous output chunk; every other token starts a new chunk.

  private def collapseTokens(tokenized: Seq[KoreanToken]): List[String] = {
    // Fold state: the chunks built so far, plus a flag marking whether the
    // previous token had a prefix POS (the final flag is unused after the fold).
    val (output, _) = tokenized.foldLeft((List[String](), false)) {
      case ((output: List[String], isPrefix: Boolean), token: KoreanToken) =>
        if (output.nonEmpty && (isPrefix || SuffixPos.contains(token.pos))) {
          // Suffix token, or any token right after a prefix token:
          // append its text to the last chunk instead of starting a new one.
          val attached = output.lastOption.getOrElse("") + token.text
          (output.init :+ attached, false)
        } else if (PrefixPos.contains(token.pos)) {
          // Prefix token: start a new chunk and mark that the next token attaches to it.
          (output :+ token.text, true)
        } else {
          // Ordinary token: start a new chunk.
          (output :+ token.text, false)
        }
    }
    output
  }
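
For reference, below is a minimal, self-contained sketch of the same fold. The Pos values, the Token case class, and the PrefixPos/SuffixPos sets are illustrative stand-ins, not the library's actual KoreanPos enumeration or the private sets in KoreanDetokenizer; only the collapsing logic mirrors the method above.

  // Sketch only: hypothetical token type and POS sets, same fold structure.
  object CollapseSketch {
    sealed trait Pos
    case object Noun extends Pos
    case object NounPrefix extends Pos // hypothetical "prefix" POS
    case object Josa extends Pos       // hypothetical "suffix" POS

    case class Token(text: String, pos: Pos)

    val PrefixPos: Set[Pos] = Set(NounPrefix)
    val SuffixPos: Set[Pos] = Set(Josa)

    def collapse(tokens: Seq[Token]): List[String] =
      tokens.foldLeft((List.empty[String], false)) {
        case ((out, isPrefix), t) =>
          if (out.nonEmpty && (isPrefix || SuffixPos.contains(t.pos)))
            (out.init :+ (out.last + t.text), false) // attach to previous chunk
          else
            (out :+ t.text, PrefixPos.contains(t.pos)) // new chunk; remember prefix
      }._1

    def main(args: Array[String]): Unit = {
      // The josa "을" attaches to the preceding noun: List(사랑을, 하다)
      println(collapse(Seq(Token("사랑", Noun), Token("을", Josa), Token("하다", Noun))))
    }
  }

Carrying the boolean through the fold is what lets a prefix token affect the token after it without look-ahead: the flag is set when a prefix chunk is emitted and consumed (reset to false) as soon as the next token is attached.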