Github user cloud-fan commented on a diff in the pull request:

    https://github.com/apache/spark/pull/22666#discussion_r226641023
  
    --- Diff: sql/core/src/main/scala/org/apache/spark/sql/functions.scala ---
    @@ -3886,6 +3886,31 @@ object functions {
         withExpr(new CsvToStructs(e.expr, schema.expr, options.asScala.toMap))
       }
     
    +  /**
    +   * Parses a column containing a CSV string and infers its schema.
    +   *
    +   * @param e a string column containing CSV data.
    +   *
    +   * @group collection_funcs
    +   * @since 3.0.0
    +   */
    +  def schema_of_csv(e: Column): Column = withExpr(new SchemaOfCsv(e.expr))
    +
    +  /**
    +   * Parses a column containing a CSV string and infers its schema using options.
    +   *
    +   * @param e a string column containing CSV data.
    +   * @param options options to control how the CSV is parsed. accepts the same options as the
    +   *                CSV data source. See [[DataFrameReader#csv]].
    +   * @return a column with a string literal containing the schema in DDL format.
    +   *
    +   * @group collection_funcs
    +   * @since 3.0.0
    +   */
    +  def schema_of_csv(e: Column, options: java.util.Map[String, String]): Column = {
    --- End diff ---
    
    shall we have an API with a Scala `Map`?
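    
    For illustration, one possible shape for such a Scala `Map` overload is sketched below. This is only a sketch: it assumes `SchemaOfCsv` has a constructor that also accepts the options map, mirroring `CsvToStructs(child, schema, options)` used at the top of the diff, which the quoted diff itself does not show.
    
        // Sketch inside object functions; assumes a SchemaOfCsv(child, options)
        // constructor exists, mirroring CsvToStructs(e.expr, schema.expr, options)
        // used earlier in this file. Not part of the actual diff.
        def schema_of_csv(e: Column, options: Map[String, String]): Column =
          withExpr(new SchemaOfCsv(e.expr, options))
    
        // The Java-friendly overload from the diff could then simply delegate,
        // using the same asScala conversion already used in functions.scala:
        def schema_of_csv(e: Column, options: java.util.Map[String, String]): Column =
          schema_of_csv(e, options.asScala.toMap)
    
    With such an overload, a Scala caller could pass options without an `.asJava` conversion, e.g. `schema_of_csv(col("value"), Map("sep" -> "|"))` (illustrative usage only).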

