src/main/scala/org/apache/spark/sql/execution/SparkPlanInfo.scala
// in object SparkPlanInfo, inside fromSparkPlan(plan: SparkPlan)
new SparkPlanInfo(
  plan.nodeName,
  plan.simpleString(SQLConf.get.maxToStringFields),
  children.map(fromSparkPlan),
  metadata,
  metrics)
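The node description stored for the UI is truncated eagerly, using SQLConf.get.maxToStringFields, which is backed by the spark.sql.debug.maxToStringFields setting (default 25). A minimal sketch of the effect, assuming a local session; the column names and the exact elision text are illustrative:

import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().master("local[1]").appName("maxFieldsDemo").getOrCreate()
import spark.implicits._

// More columns than the maxFields limit passed below.
val df = Seq((1, 2, 3, 4, 5, 6)).toDF("c1", "c2", "c3", "c4", "c5", "c6")

// With a small limit, the one-line description keeps the first few
// fields and elides the rest (e.g. "... N more fields").
println(df.queryExecution.executedPlan.simpleString(3))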
src/main/scala/org/apache/spark/sql/catalyst/plans/QueryPlan.scala
override def simpleString(maxFields: Int): String = statePrefix + super.simpleString(maxFields)
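statePrefix is what makes broken plans stand out in explain output. In recent Spark versions it flags plans that reference attributes missing from their children; the sketch below is a paraphrase of the rule, not a verbatim quote, so check the source for the exact form:

// Paraphrase: plans whose input attributes cannot be resolved from their
// children get a "!" in front of the node name (LogicalPlan additionally
// prefixes unresolved plans with a single quote).
protected def statePrefix =
  if (missingInput.nonEmpty && children.nonEmpty) "!" else ""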
src/main/scala/org/apache/spark/sql/catalyst/trees/TreeNode.scala
/**
 * ONE line description of this node.
 * @param maxFields Maximum number of fields that will be converted to strings.
 *                  Any elements beyond the limit will be dropped.
 */
def simpleString(maxFields: Int): String = s"$nodeName ${argString(maxFields)}".trim
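So a node's one-line form is just its name followed by its truncated argument list. Continuing with the spark session from the sketch above (expression IDs vary per session):

// Prints something like: Filter (id#0L > 5)
// The "L" suffix marks a long-typed attribute; #0 is the expression ID.
println(spark.range(10).filter("id > 5").queryExecution.optimizedPlan.simpleString(25))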
/** Returns a string representing the arguments to this node, minus any children */
def argString(maxFields: Int): String = stringArgs.flatMap {
  // Children are rendered by the tree structure itself, so skip them here.
  case tn: TreeNode[_] if allChildren.contains(tn) => Nil
  case Some(tn: TreeNode[_]) if allChildren.contains(tn) => Nil
  // Non-child TreeNode arguments are inlined via their own one-line form.
  case Some(tn: TreeNode[_]) => tn.simpleString(maxFields) :: Nil
  case tn: TreeNode[_] => tn.simpleString(maxFields) :: Nil
  // Collections consisting solely of children, and empty collections, are skipped.
  case seq: Seq[Any] if seq.toSet.subsetOf(allChildren.asInstanceOf[Set[Any]]) => Nil
  case iter: Iterable[_] if iter.isEmpty => Nil
  case array: Array[_] if array.isEmpty => Nil
  // Other collections are formatted with truncation to maxFields.
  case xs @ (_: Seq[_] | _: Set[_] | _: Array[_]) =>
    formatArg(xs, maxFields) :: Nil
  case null => Nil
  case None => Nil
  case Some(null) => Nil
  case Some(table: CatalogTable) =>
    stringArgsForCatalogTable(table)
  case Some(any) => any :: Nil
  // Maps and table specs are redacted so sensitive options are not printed.
  case map: CaseInsensitiveStringMap =>
    redactMapString(map.asCaseSensitiveMap().asScala, maxFields)
  case map: Map[_, _] =>
    redactMapString(map, maxFields)
  case t: TableSpec =>
    t.copy(properties = Utils.redact(t.properties).toMap,
      options = Utils.redact(t.options).toMap) :: Nil
  case table: CatalogTable =>
    stringArgsForCatalogTable(table)
  case other => other :: Nil
}.mkString(", ")
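Collection arguments that survive the pattern match go through formatArg, which applies the same truncation rule as Utils.truncatedString: keep the first maxFields - 1 elements and summarize the remainder. A standalone sketch of that rule; truncated is my own helper name, and the cutoff arithmetic is an assumption to verify against the source:

// Keep maxFields - 1 elements and summarize the rest, mimicking the
// "... N more fields" suffix seen in truncated plan strings.
def truncated(fields: Seq[String], maxFields: Int): String =
  if (fields.length > maxFields) {
    val kept = math.max(0, maxFields - 1)
    (fields.take(kept) :+ s"... ${fields.length - kept} more fields").mkString(", ")
  } else {
    fields.mkString(", ")
  }

// truncated(Seq("a", "b", "c", "d"), 3) ==> "a, b, ... 2 more fields"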
src/main/scala/org/apache/spark/sql/catalyst/expressions/namedExpressions.scala
// in case class Alias
override def toString: String = s"$child AS $name#${exprId.id}$typeSuffix$delaySuffix"
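This is why aliases print as name#id throughout plan output: the expression ID is baked into toString. A small demonstration against the Catalyst API, which is internal and unstable, so treat the constructor shapes as assumptions; the exact IDs differ per session:

import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference}
import org.apache.spark.sql.types.IntegerType

val a = AttributeReference("a", IntegerType)()
val aliased = Alias(a, "b")()

// Prints something like: a#0 AS b#1
println(aliased.toString)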