<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic column wise sum in PySpark dataframe in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/column-wise-sum-in-pyspark-dataframe/m-p/28152#M19975</link>
    <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;I have a dataframe of 18000000 rows and 1322 columns with '0' and '1' values.&lt;/P&gt;
&lt;P&gt;I want to find how many '1's are in every column.&lt;/P&gt;
&lt;P&gt;below is DataSet&lt;/P&gt;
&lt;P&gt;&lt;/P&gt; 
&lt;P&gt; se_00001 se_00007 se_00036 se_00100 se_0010p se_00250&lt;/P&gt;</description>
    <pubDate>Mon, 22 Apr 2019 08:36:13 GMT</pubDate>
    <dc:creator>siddhu308</dc:creator>
    <dc:date>2019-04-22T08:36:13Z</dc:date>
    <item>
      <title>column wise sum in PySpark dataframe</title>
      <link>https://community.databricks.com/t5/data-engineering/column-wise-sum-in-pyspark-dataframe/m-p/28152#M19975</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;I have a dataframe of 18000000 rows and 1322 columns with '0' and '1' values.&lt;/P&gt;
&lt;P&gt;I want to find how many '1's are in every column.&lt;/P&gt;
&lt;P&gt;below is DataSet&lt;/P&gt;
&lt;P&gt;&lt;/P&gt; 
&lt;P&gt; se_00001 se_00007 se_00036 se_00100 se_0010p se_00250&lt;/P&gt;</description>
      <pubDate>Mon, 22 Apr 2019 08:36:13 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/column-wise-sum-in-pyspark-dataframe/m-p/28152#M19975</guid>
      <dc:creator>siddhu308</dc:creator>
      <dc:date>2019-04-22T08:36:13Z</dc:date>
    </item>
    <item>
      <title>Re: column wise sum in PySpark dataframe</title>
      <link>https://community.databricks.com/t5/data-engineering/column-wise-sum-in-pyspark-dataframe/m-p/28153#M19976</link>
      <description>&lt;P&gt; seag_00001 seag_00007 seag_00036 seag_00100 seag_0010p seag_00250 &lt;/P&gt;
&lt;P&gt;1 0 1 0 0 0&lt;/P&gt; 
&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Mon, 22 Apr 2019 08:37:04 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/column-wise-sum-in-pyspark-dataframe/m-p/28153#M19976</guid>
      <dc:creator>siddhu308</dc:creator>
      <dc:date>2019-04-22T08:37:04Z</dc:date>
    </item>
    <item>
      <title>Re: column wise sum in PySpark dataframe</title>
      <link>https://community.databricks.com/t5/data-engineering/column-wise-sum-in-pyspark-dataframe/m-p/28154#M19977</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt; Hi Siddhu, &lt;/P&gt; 
&lt;P&gt; You can use &lt;/P&gt; 
&lt;PRE&gt;&lt;CODE&gt;
df.select(sum("col1"), sum("col2"), sum("col3"))
where col1, col2, col3 are the column names for which you would like to find the sum
&lt;/CODE&gt;&lt;/PRE&gt; 
&lt;P&gt; please let us know if it answers your question &lt;/P&gt; 
&lt;P&gt; Thanks &lt;/P&gt; 
&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 23 Apr 2019 14:41:14 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/column-wise-sum-in-pyspark-dataframe/m-p/28154#M19977</guid>
      <dc:creator>mathan_pillai</dc:creator>
      <dc:date>2019-04-23T14:41:14Z</dc:date>
    </item>
  </channel>
</rss>

