Commit 07f684e1 authored by Daniele Venzano's avatar Daniele Venzano

Add iPython notebooks, fix a couple of bugs

parent 5db61347
__version__ = '0.8.90'
__version__ = '0.8.91'
......@@ -132,3 +132,52 @@ def spark_submit_app(name: str,
submit = spark_submit_proc(worker_mem_limit, submit_image, commandline, spark_options)
app.processes.append(submit)
return app
def spark_ipython_notebook_proc(mem_limit: int, image: str, spark_options: str) -> ZoeApplicationProcess:
    """Build the Zoe process description for a Spark iPython notebook container.

    The process is monitored, exposes the Spark application UI (port 4040) and
    the notebook web interface (port 8888, the main endpoint), and receives the
    Spark connection settings through environment variables.
    """
    def _http_endpoint(label, port, is_main):
        # Small factory for the two HTTP endpoints this process exposes.
        endpoint = ZoeProcessEndpoint()
        endpoint.name = label
        endpoint.protocol = "http"
        endpoint.port_number = port
        endpoint.path = "/"
        endpoint.is_main_endpoint = is_main
        return endpoint

    notebook = ZoeApplicationProcess()
    notebook.name = "spark-ipython"
    notebook.docker_image = image
    notebook.monitor = True
    notebook.required_resources["memory"] = mem_limit
    notebook.ports.append(_http_endpoint("Spark application web interface", 4040, False))
    notebook.ports.append(_http_endpoint("iPython Notebook interface", 8888, True))
    # {execution_id} is substituted later, when the execution is started.
    for var_name, var_value in (("SPARK_MASTER_IP", "spark-master-{execution_id}"),
                                ("SPARK_OPTIONS", spark_options),
                                ("SPARK_EXECUTOR_RAM", str(mem_limit))):
        notebook.environment.append([var_name, var_value])
    return notebook
def spark_ipython_notebook_app(name: str,
                               master_mem_limit: int,
                               worker_count: int,
                               worker_mem_limit: int,
                               worker_cores: int,
                               master_image: str,
                               worker_image: str,
                               notebook_image: str,
                               spark_options: str) -> ZoeApplication:
    """Assemble a Zoe application made of a Spark master, its workers and an
    iPython notebook front-end.

    The application is long-running (will_end is False) and needs no binary
    upload; the notebook process shares the workers' memory limit.
    """
    zoe_app = ZoeApplication()
    zoe_app.name = name
    zoe_app.will_end = False
    zoe_app.priority = 512
    zoe_app.requires_binary = False
    zoe_app.processes.append(spark_master_proc(master_mem_limit, master_image))
    zoe_app.processes.extend(spark_worker_proc(worker_count, worker_mem_limit, worker_cores, worker_image))
    zoe_app.processes.append(spark_ipython_notebook_proc(worker_mem_limit, notebook_image, spark_options))
    return zoe_app
......@@ -112,15 +112,17 @@ class PlatformManager:
state = AlchemySession()
all_containers = state.query(ContainerState).all()
for c in all_containers:
if c.cluster.execution is None: # The execution has been deleted, cleanup and forget anything ever happened
self.swarm.terminate_container(c.docker_id, delete=True)
state.delete(c)
try:
DDNSUpdater().delete_a_record(c.readable_name, c.ip_address)
except DDNSUpdateFailed as e:
log.error(e.value)
if not self.is_container_alive(c):
if c.cluster.execution is None: # The execution has been deleted, cleanup and forget everything ever happened
self.swarm.terminate_container(c.docker_id, delete=True)
state.delete(c)
try:
DDNSUpdater().delete_a_record(c.readable_name, c.ip_address)
except DDNSUpdateFailed as e:
log.error(e.value)
elif c.cluster.execution.status == "running" or c.cluster.execution.status == "cleaning up":
if c.cluster.execution is None:
continue # cleaned-up by the if above
if c.cluster.execution.status == "running" or c.cluster.execution.status == "cleaning up":
self._container_died(state, c)
for e in state.query(ExecutionState).filter_by(status='cleaning up').all():
......
......@@ -8,7 +8,7 @@ import zoe_client.applications as ap
import zoe_client.diagnostics as di
import zoe_client.executions as ex
import zoe_client.users as us
from zoe_client.predefined_apps.spark import spark_notebook_app, spark_submit_app
from zoe_client.predefined_apps.spark import spark_notebook_app, spark_submit_app, spark_ipython_notebook_app
import common.zoe_storage_client as storage
api_bp = Blueprint('api', __name__)
......@@ -65,7 +65,7 @@ def application_new():
if app_name is None:
return jsonify(status='error', msg='missing app_name in POST')
fcontents = None
if form_data['app_type'] == "spark-notebook" or form_data['app_type'] == "spark-submit":
if form_data['app_type'] == "spark-notebook" or form_data['app_type'] == "spark-submit" or form_data['app_type'] == "ipython-notebook":
params['name'] = app_name
keys = ["worker_count", 'master_mem_limit', 'worker_cores', 'worker_mem_limit', 'spark_options', 'master_image', 'worker_image']
for key in keys:
......@@ -102,6 +102,12 @@ def application_new():
fcontents = file_data.stream.read()
app_descr = spark_submit_app(**params)
elif form_data['app_type'] == "ipython-notebook":
notebook_image = _form_field(form_data, 'ipython_image')
if notebook_image is None:
return jsonify(status='error', msg='missing notebook_image in POST')
params['notebook_image'] = notebook_image
app_descr = spark_ipython_notebook_app(**params)
else:
log.error("unknown application type: {}".format(form_data['app_type']))
......
......@@ -12,8 +12,8 @@
<input type="text" autofocus autocomplete="on" required pattern="[a-z0-9_\-]+" name="app_name" id="app_name" placeholder="myapp"><br/>
<label for="app_type">Type:</label><br/>
<input id="radio_nb" type="radio" name="app_type" value="spark-notebook" checked>&nbsp;Spark Notebook<br/>
<input id="radio_inb" type="radio" name="app_type" value="ipython-notebook" disabled>&nbsp;iPython Notebook (coming soon)<br/>
<input id="radio_inb" type="radio" name="app_type" value="ipython-notebook" checked>&nbsp;Spark iPython Notebook<br/>
<input id="radio_nb" type="radio" name="app_type" value="spark-notebook">&nbsp;Spark Notebook<br/>
<input id="radio_sub" type="radio" name="app_type" value="spark-submit">&nbsp;Spark Application<br/>
<input id="radio_zk" type="radio" name="app_type" value="zookeeper" disabled>&nbsp;Zookeeper (coming soon)<br/>
</section>
......@@ -60,6 +60,15 @@
</section>
</div>
</div>
<div class="branch" id="ipython-notebook">
<div class="submit step">
<h3 class="step-header">Spark: iPython notebook settings</h3>
<section>
<label for="ipython_image">Spark iPython Notebook Docker image:</label>
<input type="text" autofocus autocomplete="on" required pattern="[a-z0-9_\-=]+" name="ipython_image" id="ipython_image" size="40" value="/zoerepo/spark-ipython-notebook">
</section>
</div>
</div>
<div class="branch" id="spark-submit">
<div class="submit step">
<h3 class="step-header">Spark: application settings</h3>
......@@ -103,7 +112,7 @@
transitions: {
'app_type': function( state, action ) {
var app_type = $("[name=app_type]:checked").val();
if (app_type == "spark-notebook" || app_type == "spark-submit") {
if (app_type == "spark-notebook" || app_type == "spark-submit" || app_type == "ipython-notebook") {
return "spark";
}
},
......
......@@ -97,7 +97,7 @@
<tbody>
{% for e in past_executions %}
<tr class="{{ loop.cycle('odd', 'even') }}">
<td>{{ e[0].name }}</td>
<td>{{ e[0].description.name }}</td>
<td>{{ e[1].name }}</td>
<td>{{ e[1].status }}</td>
<td><script>format_timestamp({{ e[1].time_scheduled.timestamp() }})</script></td>
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment