<?xml version="1.0" encoding="UTF-8"?>
<rss xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:taxo="http://purl.org/rss/1.0/modules/taxonomy/" version="2.0">
  <channel>
    <title>topic Re: Spark + Python - Java gateway process exited before sending the driver its port number? in Data Engineering</title>
    <link>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30264#M21918</link>
    <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;This is a working example running on wsgi,&lt;/P&gt;import os import sys
&lt;P&gt;&lt;/P&gt; 
&lt;B&gt; Path for spark source folder&lt;/B&gt; 
&lt;P&gt;os.environ['SPARK_HOME'] = "C:\Apache\spark-1.4.1"&lt;/P&gt; 
&lt;B&gt; Append pyspark to Python Path&lt;/B&gt; 
&lt;P&gt;sys.path.append("C:\Apache\spark-1.4.1\python")&lt;/P&gt; 
&lt;P&gt;from pyspark import SparkContext from pyspark import SparkConf from pyspark.sql import SQLContext&lt;/P&gt; 
&lt;B&gt; This is our application object. It could have any name,&lt;/B&gt; 
&lt;P&gt; except when using mod_wsgi where it must be "application" def application(environ, start_response):&lt;/P&gt; 
&lt;PRE&gt;&lt;CODE&gt; # Initialize SparkContext
 sc = SparkContext('local')
 words = sc.parallelize(["scala","java","hadoop","spark","akka"])
 count = words.count()
 #print count

 sc.stop()

 response_body = "Successfully imported Spark Modules and the total words are: " + str(count)

 # HTTP response code and message
 status = '200 OK'

 # These are HTTP headers expected by the client.
 # They must be wrapped as a list of tupled pairs:
 # [(Header name, Header value)].
 response_headers = [('Content-Type', 'text/plain'),
                    ('Content-Length', str(len(response_body)))]

 # Send them to the server using the supplied function
 start_response(status, response_headers)

 # Return the response body.
 # Notice it is wrapped in a list although it could be any iterable.
 return [response_body]
&lt;/CODE&gt;&lt;/PRE&gt;</description>
    <pubDate>Thu, 27 Aug 2015 13:03:02 GMT</pubDate>
    <dc:creator>lau_thiamkok</dc:creator>
    <dc:date>2015-08-27T13:03:02Z</dc:date>
    <item>
      <title>Spark + Python - Java gateway process exited before sending the driver its port number?</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30261#M21915</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;Why do I get this error on my browser screen, &lt;/P&gt;
&lt;PRE&gt;&lt;CODE&gt;&amp;lt;type 'exceptions.Exception'&amp;gt;: Java gateway process exited before sending the driver its port number args = ('Java gateway process exited before sending the driver its port number',) message = 'Java gateway process exited before sending the driver its port number'&lt;/CODE&gt;&lt;/PRE&gt;
&lt;P&gt;For,&lt;/P&gt;
&lt;PRE&gt;&lt;CODE&gt;#!/Python27/python
print "Content-type: text/html; charset=utf-8"
print
# enable debugging
import cgitb
cgitb.enable()
import os
import sys
# Path for spark source folder
os.environ['SPARK_HOME'] = "C:\Apache\spark-1.4.1"
# Append pyspark to Python Path
sys.path.append("C:\Apache\spark-1.4.1\python")
from pyspark import SparkContext
from pyspark import SparkConf
from pyspark.sql import SQLContext
print ("Successfully imported Spark Modules")
# Initialize SparkContext
sc = SparkContext('local')
words = sc.parallelize(["scala","java","hadoop","spark","akka"])
print words.count()
sc.stop()&lt;/CODE&gt;&lt;/PRE&gt;
&lt;P&gt;I'm on Windows7. My Spark version is spark-1.4.1-bin-hadoop2.6.tgz - prebuilt for hadoop 2.6 and later.&lt;/P&gt;
&lt;P&gt;Any ideas how I can fix it?&lt;/P&gt;
&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Wed, 05 Aug 2015 08:28:05 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30261#M21915</guid>
      <dc:creator>lau_thiamkok</dc:creator>
      <dc:date>2015-08-05T08:28:05Z</dc:date>
    </item>
    <item>
      <title>Re: Spark + Python - Java gateway process exited before sending the driver its port number?</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30262#M21916</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;I am facing the same problem....:( Have anybody found any solution?&lt;/P&gt;
&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 27 Aug 2015 12:54:08 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30262#M21916</guid>
      <dc:creator>jmdvinodjmd</dc:creator>
      <dc:date>2015-08-27T12:54:08Z</dc:date>
    </item>
    <item>
      <title>Re: Spark + Python - Java gateway process exited before sending the driver its port number?</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30263#M21917</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;I got it fix by running the script on apache mod wsgi. DO NOT run it on cgi !!&lt;/P&gt;
&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 27 Aug 2015 13:00:27 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30263#M21917</guid>
      <dc:creator>lau_thiamkok</dc:creator>
      <dc:date>2015-08-27T13:00:27Z</dc:date>
    </item>
    <item>
      <title>Re: Spark + Python - Java gateway process exited before sending the driver its port number?</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30264#M21918</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;This is a working example running on wsgi,&lt;/P&gt;import os import sys
&lt;P&gt;&lt;/P&gt; 
&lt;B&gt; Path for spark source folder&lt;/B&gt; 
&lt;P&gt;os.environ['SPARK_HOME'] = "C:\Apache\spark-1.4.1"&lt;/P&gt; 
&lt;B&gt; Append pyspark to Python Path&lt;/B&gt; 
&lt;P&gt;sys.path.append("C:\Apache\spark-1.4.1\python")&lt;/P&gt; 
&lt;P&gt;from pyspark import SparkContext from pyspark import SparkConf from pyspark.sql import SQLContext&lt;/P&gt; 
&lt;B&gt; This is our application object. It could have any name,&lt;/B&gt; 
&lt;P&gt; except when using mod_wsgi where it must be "application" def application(environ, start_response):&lt;/P&gt; 
&lt;PRE&gt;&lt;CODE&gt; # Initialize SparkContext
 sc = SparkContext('local')
 words = sc.parallelize(["scala","java","hadoop","spark","akka"])
 count = words.count()
 #print count

 sc.stop()

 response_body = "Successfully imported Spark Modules and the total words are: " + str(count)

 # HTTP response code and message
 status = '200 OK'

 # These are HTTP headers expected by the client.
 # They must be wrapped as a list of tupled pairs:
 # [(Header name, Header value)].
 response_headers = [('Content-Type', 'text/plain'),
                    ('Content-Length', str(len(response_body)))]

 # Send them to the server using the supplied function
 start_response(status, response_headers)

 # Return the response body.
 # Notice it is wrapped in a list although it could be any iterable.
 return [response_body]
&lt;/CODE&gt;&lt;/PRE&gt;</description>
      <pubDate>Thu, 27 Aug 2015 13:03:02 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30264#M21918</guid>
      <dc:creator>lau_thiamkok</dc:creator>
      <dc:date>2015-08-27T13:03:02Z</dc:date>
    </item>
    <item>
      <title>Re: Spark + Python - Java gateway process exited before sending the driver its port number?</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30265#M21919</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;Hi, we are using anaconda +CDH , pyspark works well but using Ipython gives&lt;/P&gt;
&lt;P&gt;&lt;B&gt;Java gateway process exited before sending the driver its port number&lt;/B&gt;&lt;/P&gt;
&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Tue, 01 Sep 2015 06:32:57 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30265#M21919</guid>
      <dc:creator>Sunil</dc:creator>
      <dc:date>2015-09-01T06:32:57Z</dc:date>
    </item>
    <item>
      <title>Re: Spark + Python - Java gateway process exited before sending the driver its port number?</title>
      <link>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30266#M21920</link>
      <description>&lt;P&gt;&lt;/P&gt;
&lt;P&gt;I'm facing the same problem, does anybody know how to connect Spark in Ipython notebook?&lt;/P&gt;
&lt;P&gt;The issue I created,&lt;/P&gt;
&lt;P&gt;&lt;A href="https://github.com/jupyter/notebook/issues/743" target="_blank"&gt;https://github.com/jupyter/notebook/issues/743&lt;/A&gt;&lt;/P&gt;
&lt;P&gt;&lt;/P&gt;</description>
      <pubDate>Thu, 26 Nov 2015 03:31:01 GMT</pubDate>
      <guid>https://community.databricks.com/t5/data-engineering/spark-python-java-gateway-process-exited-before-sending-the/m-p/30266#M21920</guid>
      <dc:creator>EricaLi</dc:creator>
      <dc:date>2015-11-26T03:31:01Z</dc:date>
    </item>
  </channel>
</rss>

