Spark: printSchema

printSchema

This note walks through the source behind Dataset.printSchema, from the public API down to the per-type formatting in StructField, ArrayType, and MapType. The no-arg printSchema() (since 1.6.0) delegates to the depth-limited overload (since 3.0.0) with Int.MaxValue, which simply prints schema.treeString(level):

spark/sql/core/src/main/scala/org/apache/spark/sql/Dataset.scala

 /**
   * Prints the schema to the console in a nice tree format.
   *
   * @group basic
   * @since 1.6.0
   */
  def printSchema(): Unit = printSchema(Int.MaxValue)

  // scalastyle:off println
  /**
   * Prints the schema up to the given level to the console in a nice tree format.
   *
   * @group basic
   * @since 3.0.0
   */
  def printSchema(level: Int): Unit = println(schema.treeString(level))
  // scalastyle:on println
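
For illustration, a minimal sketch of how the two overloads behave on a nested schema. The SparkSession setup and the Person/Address case classes are assumptions made up for this example, not part of the Spark sources above:

import org.apache.spark.sql.SparkSession

object PrintSchemaDemo {
  // hypothetical case classes, used only to get a nested schema
  case class Address(city: String, zip: String)
  case class Person(name: String, tags: Seq[String], address: Address)

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("printSchema-demo").getOrCreate()
    import spark.implicits._

    val df = Seq(Person("alice", Seq("a", "b"), Address("Beijing", "100000"))).toDF()

    df.printSchema()   // no-arg overload: full tree (Int.MaxValue)
    // root
    //  |-- name: string (nullable = true)
    //  |-- tags: array (nullable = true)
    //  |    |-- element: string (containsNull = true)
    //  |-- address: struct (nullable = true)
    //  |    |-- city: string (nullable = true)
    //  |    |-- zip: string (nullable = true)

    df.printSchema(1)  // depth-limited overload: top-level fields only
    // root
    //  |-- name: string (nullable = true)
    //  |-- tags: array (nullable = true)
    //  |-- address: struct (nullable = true)

    spark.stop()
  }
}
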
treeString

StructType.treeString writes the "root" header, then asks every top-level field to render itself with the prefix " |"; a non-positive maxDepth is treated as unlimited:

spark/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructType.scala

def treeString(maxDepth: Int): String = {
    val stringConcat = new StringUtils.StringConcat()
    stringConcat.append("root\n")
    val prefix = " |"
    val depth = if (maxDepth > 0) maxDepth else Int.MaxValue
    fields.foreach(field => field.buildFormattedString(prefix, stringConcat, depth))
    stringConcat.toString()
  }
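
Because treeString returns a plain String, the same tree can also be captured without writing to stdout, e.g. to hand it to a logger. A small sketch, assuming the df DataFrame from the previous example (the logger is hypothetical):

val fullTree: String = df.schema.treeString(0)   // a non-positive depth is treated as unlimited
val topLevel: String = df.schema.treeString(1)   // top-level fields only
// logger.info(s"input schema:\n$fullTree")      // hypothetical logger call
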
buildFormattedString

From here the rendering recurses through the type hierarchy. StructField prints one "|-- name: type (nullable = ...)" line, then recurses into its data type via DataType.buildFormattedString with an indented prefix:

spark/sql/catalyst/src/main/scala/org/apache/spark/sql/types/StructField.scala

private[sql] def buildFormattedString(
      prefix: String,
      stringConcat: StringConcat,
      maxDepth: Int): Unit = {
    if (maxDepth > 0) {
      stringConcat.append(s"$prefix-- $name: ${dataType.typeName} (nullable = $nullable)\n")
      DataType.buildFormattedString(dataType, s"$prefix    |", stringConcat, maxDepth)
    }
  }
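
A minimal sketch of the per-field line this produces, on a hand-built schema (field names invented for illustration):

import org.apache.spark.sql.types._

val flat = StructType(Seq(
  StructField("id", LongType, nullable = false),
  StructField("name", StringType)   // nullable defaults to true
))
println(flat.treeString(0))
// root
//  |-- id: long (nullable = false)
//  |-- name: string (nullable = true)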

DataType.buildFormattedString is the dispatcher: it recurses only into the complex types (ArrayType, StructType, MapType), decrementing maxDepth by one at each level, which is what makes printSchema(level) stop at the requested depth; atomic types end the recursion:

spark/sql/catalyst/src/main/scala/org/apache/spark/sql/types/DataType.scala

protected[types] def buildFormattedString(
      dataType: DataType,
      prefix: String,
      stringConcat: StringConcat,
      maxDepth: Int): Unit = {
    dataType match {
      case array: ArrayType =>
        array.buildFormattedString(prefix, stringConcat, maxDepth - 1)
      case struct: StructType =>
        struct.buildFormattedString(prefix, stringConcat, maxDepth - 1)
      case map: MapType =>
        map.buildFormattedString(prefix, stringConcat, maxDepth - 1)
      case _ =>
    }
  }
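
A sketch of the depth cut-off on a doubly nested struct (the schema is invented for the example): each nesting level costs one unit of maxDepth, so treeString(2) stops after the first nested level while treeString(3) reaches the leaf.

import org.apache.spark.sql.types._

val nested = StructType(Seq(
  StructField("outer", StructType(Seq(
    StructField("inner", StructType(Seq(
      StructField("leaf", IntegerType)
    )))
  )))
))
println(nested.treeString(2))
// root
//  |-- outer: struct (nullable = true)
//  |    |-- inner: struct (nullable = true)
println(nested.treeString(3))
// root
//  |-- outer: struct (nullable = true)
//  |    |-- inner: struct (nullable = true)
//  |    |    |-- leaf: integer (nullable = true)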

ArrayType prints an "element" line carrying the containsNull flag and recurses into the element type:

spark/sql/catalyst/src/main/scala/org/apache/spark/sql/types/ArrayType.scala

private[sql] def buildFormattedString(
      prefix: String,
      stringConcat: StringConcat,
      maxDepth: Int): Unit = {
    if (maxDepth > 0) {
      stringConcat.append(
        s"$prefix-- element: ${elementType.typeName} (containsNull = $containsNull)\n")
      DataType.buildFormattedString(elementType, s"$prefix    |", stringConcat, maxDepth)
    }
  }
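
A sketch for a nested array column (column name invented), showing one "element" line per array level:

import org.apache.spark.sql.types._

val withArray = StructType(Seq(
  StructField("matrix", ArrayType(ArrayType(IntegerType, containsNull = false)))
))
println(withArray.treeString(0))
// root
//  |-- matrix: array (nullable = true)
//  |    |-- element: array (containsNull = true)
//  |    |    |-- element: integer (containsNull = false)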

MapType prints separate "key" and "value" lines and recurses into both; only the value line carries a nullability flag (valueContainsNull), since map keys cannot be null in Spark SQL:

spark/sql/catalyst/src/main/scala/org/apache/spark/sql/types/MapType.scala

private[sql] def buildFormattedString(
      prefix: String,
      stringConcat: StringConcat,
      maxDepth: Int = Int.MaxValue): Unit = {
    if (maxDepth > 0) {
      stringConcat.append(s"$prefix-- key: ${keyType.typeName}\n")
      DataType.buildFormattedString(keyType, s"$prefix    |", stringConcat, maxDepth)
      stringConcat.append(s"$prefix-- value: ${valueType.typeName} " +
        s"(valueContainsNull = $valueContainsNull)\n")
      DataType.buildFormattedString(valueType, s"$prefix    |", stringConcat, maxDepth)
    }
  }
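
Finally, a sketch for a map column whose value is itself a struct (names invented), showing the separate key and value lines:

import org.apache.spark.sql.types._

val withMap = StructType(Seq(
  StructField("attrs", MapType(
    StringType,
    StructType(Seq(StructField("score", DoubleType))),
    valueContainsNull = true))
))
println(withMap.treeString(0))
// root
//  |-- attrs: map (nullable = true)
//  |    |-- key: string
//  |    |-- value: struct (valueContainsNull = true)
//  |    |    |-- score: double (nullable = true)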