Commit e3cabaf9 authored by Daniele Venzano

Create entrypoints

Move the main() script code inside the packages, following Python packaging best practices.
parent 0b5b1343
__version__ = '0.8.1'
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
import sys
print("For now this does not work")
sys.exit(1)
here = path.abspath(path.dirname(__file__))
version = {}
with open("common/version.py") as fp:
    exec(fp.read(), version)
version = version['__version__']
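# A quick illustration of the single-sourcing pattern used above (a sketch
# for the reader; the version string is assumed to match common/version.py):
# exec()ing the file's text into a dict exposes __version__ without importing
# the not-yet-installed package.
_demo_ns = {}
exec("__version__ = '0.8.1'", _demo_ns)
assert _demo_ns['__version__'] == '0.8.1'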
# Get the long description from the relevant file
-with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
-    long_description = f.read()
+long_description = open('README.md').read()

setup(
    name='zoe',
@@ -28,7 +13,7 @@ setup(
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
-    version='0.8.0',
+    version=version,

    description='Zoe - Analytics on demand',
    long_description=long_description,
@@ -76,42 +61,51 @@ setup(
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
-    packages=find_packages(exclude=['scripts']),
+    packages=find_packages(exclude=['scripts', 'tests']),

    # List run-time dependencies here. These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
-    install_requires=['peppercorn'],
+    install_requires=['docker-py>=1.3.0',
+                      'Flask>=0.10.1',
+                      'python-dateutil>=2.4.2',
+                      'SQLAlchemy>=1.0.8',
+                      'tornado>=4.2.1',
+                      'zmq>=14.0.1'
+                      ],

    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    extras_require={
-        'dev': ['check-manifest'],
-        'test': ['coverage'],
+        'dev': ['Sphinx'],
+        'test': ['coverage', 'pytest'],
    },

    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    package_data={
-        'sample': ['package_data.dat'],
+        '': ['*.sh', '*.conf', '*.rst', '*.css', '*.js', '*.html'],
    },

    # Although 'package_data' is the preferred approach, in some cases you may
    # need to place data files outside of your packages. See:
    # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files  # noqa
    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
-    data_files=[('my_data', ['data/data_file'])],
+    # data_files=[('my_data', ['data/data_file'])],

    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    entry_points={
        'console_scripts': [
-            'sample=sample:main',
-        ],
-    },
+            'zoe-manage=zoe_scheduler.entrypoint:zoe_manage',
+            'zoe-scheduler=zoe_scheduler.entrypoint:zoe_scheduler',
+            'zoe-web=zoe_web.entrypoint:zoe_web',
+            'zoe=zoe_client.entrypoint:zoe'
+        ]
+    }
)
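# What the console_scripts table above produces at install time: pip writes
# one small launcher per entry that imports the named attribute and calls it.
# A rough sketch of the generated 'zoe-scheduler' launcher (an approximation;
# the exact text varies by pip version):
import sys
from zoe_scheduler.entrypoint import zoe_scheduler

if __name__ == '__main__':
    sys.exit(zoe_scheduler())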
#!/usr/bin/env python3

from argparse import ArgumentParser, Namespace
import logging

# This script is useful to run Zoe without going through the pip install process when developing
from zoe_scheduler.state import create_tables, init as state_init
from common.configuration import init as conf_init, zoeconf

argparser = None
db_engine = None


def setup_db_cmd(_):
    create_tables(db_engine)


def process_arguments() -> Namespace:
    global argparser
    argparser = ArgumentParser(description="Zoe - Container Analytics as a Service ops client")
    argparser.add_argument('-d', '--debug', action='store_true', default=False, help='Enable debug output')
    subparser = argparser.add_subparsers(title='subcommands', description='valid subcommands')

    argparser_setup_db = subparser.add_parser('setup-db', help="Create the tables in the database")
    argparser_setup_db.set_defaults(func=setup_db_cmd)

    return argparser.parse_args()


def main():
    global db_engine
    args = process_arguments()
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    conf_init()
    db_engine = state_init(zoeconf().db_url)

    try:
        args.func(args)
    except AttributeError:
        argparser.print_help()
        return

from zoe_scheduler.entrypoint import zoe_manage

if __name__ == "__main__":
-    main()
+    zoe_manage()
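# The dispatch idiom shared by all the clients in this commit, in miniature:
# each subparser registers its handler through set_defaults(func=...), main()
# calls args.func(args), and a missing subcommand surfaces as AttributeError,
# answered with the help text. A self-contained sketch (names are made up):
from argparse import ArgumentParser

def greet_cmd(args):
    print("Hello, {}".format(args.name))

parser = ArgumentParser()
sub = parser.add_subparsers(title='subcommands')
greet = sub.add_parser('greet')
greet.add_argument('name')
greet.set_defaults(func=greet_cmd)

args = parser.parse_args(['greet', 'Zoe'])
try:
    args.func(args)        # -> Hello, Zoe
except AttributeError:
    parser.print_help()    # reached when no subcommand was given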
#!/usr/bin/env python3

import argparse
import logging

# This script is useful to run Zoe without going through the pip install process when developing
from zoe_scheduler.scheduler import ZoeScheduler
from zoe_scheduler.periodic_tasks import PeriodicTaskManager
from zoe_scheduler.ipc import ZoeIPCServer
from zoe_scheduler.object_storage import init_history_paths
from zoe_scheduler.state import init as state_init
from zoe_scheduler.proxy_manager import init as proxy_init
from zoe_scheduler.entrypoint import zoe_scheduler
from common.configuration import init as conf_init

log = logging.getLogger('zoe')


def process_arguments() -> argparse.Namespace:
    argparser = argparse.ArgumentParser(description="Zoe Scheduler - Container Analytics as a Service scheduling component")
    argparser.add_argument('-d', '--debug', action='store_true', help='Enable debug output')
    argparser.add_argument('--ipc-server-port', type=int, default=8723, help='Port the IPC server should bind to')
    return argparser.parse_args()


def main():
    args = process_arguments()
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    logging.getLogger('requests').setLevel(logging.WARNING)

    zoeconf = conf_init()
    state_init(zoeconf.db_url)
    proxy_init()

    zoe_sched = ZoeScheduler()
    ipc_server = ZoeIPCServer(zoe_sched, args.ipc_server_port)

    if not init_history_paths():
        return

    tm = PeriodicTaskManager()

    barrier = zoe_sched.init_tasks(tm)
    barrier.wait()  # wait for all tasks to be ready and running

    ipc_server.start_thread()

    zoe_sched.loop()

    tm.stop_all()

-if __name__ == "__main__":
-    main()
+if __name__ == '__main__':
+    zoe_scheduler()
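# Why main() above blocks on a barrier: the scheduler starts its periodic
# tasks on background threads and must not serve IPC requests until every
# task has checked in. The same idea in miniature (the party count of 3 is an
# assumption; the real one comes from zoe_sched.init_tasks(tm)):
import threading
import time

barrier = threading.Barrier(3)  # two tasks plus the main thread

def task(delay):
    time.sleep(delay)  # simulate initialization work
    barrier.wait()     # report readiness

for d in (0.1, 0.2):
    threading.Thread(target=task, args=(d,), daemon=True).start()

barrier.wait()  # returns only once both tasks are ready and running
print("all tasks ready")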
#!/usr/bin/env python3

import argparse
import logging

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

from zoe_web import app
from common.configuration import ipcconf, init as conf_init

log = logging.getLogger("zoe_web")


def process_arguments() -> argparse.Namespace:
    argparser = argparse.ArgumentParser(description="Zoe Web - Container Analytics as a Service web client")
    argparser.add_argument('-d', '--debug', action='store_true', default=False, help='Enable debug output')
    argparser.add_argument('--ipc-server', default='localhost', help='Address of the Zoe scheduler process')
    argparser.add_argument('--ipc-port', default=8723, type=int, help='Port of the Zoe scheduler process')

    return argparser.parse_args()


def main():
    args = process_arguments()
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("tornado").setLevel(logging.WARNING)

    ipcconf['server'] = args.ipc_server
    ipcconf['port'] = args.ipc_port

    zoeconf = conf_init()

    log.info("Starting HTTP server...")
    app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
    app.secret_key = zoeconf.cookies_secret_key

    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(5000, "0.0.0.0")
    ioloop = IOLoop.instance()

    # PeriodicCallback(email_task, int(conf["email_task_interval"]) * 1000).start()
    try:
        ioloop.start()
    except KeyboardInterrupt:
        print("CTRL-C detected, terminating")

# This script is useful to run Zoe without going through the pip install process when developing
from zoe_web.entrypoint import zoe_web

if __name__ == "__main__":
-    main()
+    zoe_web()
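# If the PeriodicCallback commented out above were re-enabled, it would look
# roughly like this (a sketch; the 60-second interval and the task body are
# assumptions, the real interval came from the configuration):
from tornado.ioloop import PeriodicCallback

def email_task():
    print("periodic email task fired")

# Registers on the current IOLoop; fires every 60 seconds once it runs.
PeriodicCallback(email_task, 60 * 1000).start()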
#!/usr/bin/env python3

from argparse import ArgumentParser, Namespace
import logging
from zipfile import is_zipfile
from pprint import pprint

# This script is useful to run Zoe without going through the pip install process when developing
from zoe_client import ZoeClient
from common.configuration import init as conf_init, zoeconf

argparser = None


def get_zoe_client(args) -> ZoeClient:
    return ZoeClient(args.ipc_server, args.ipc_port)


def stats_cmd(args):
    client = get_zoe_client(args)
    stats = client.platform_stats()
    pprint(stats)


def user_new_cmd(args):
    client = get_zoe_client(args)
    user = client.user_new(args.email)
    print("New user ID: {}".format(user.id))


def user_get_cmd(args):
    client = get_zoe_client(args)
    user = client.user_get_by_email(args.email)
    print("User ID: {}".format(user.id))


def spark_cluster_new_cmd(args):
    client = get_zoe_client(args)
    application_id = client.application_spark_new(args.user_id, args.worker_count, args.executor_memory, args.executor_cores, args.name)
    print("Spark application added with ID: {}".format(application_id))


def spark_notebook_new_cmd(args):
    client = get_zoe_client(args)
    application_id = client.application_spark_notebook_new(args.user_id, args.worker_count, args.executor_memory, args.executor_cores, args.name)
    print("Spark application added with ID: {}".format(application_id))


def spark_submit_new_cmd(args):
    if not is_zipfile(args.file):
        print("Error: the file specified is not a zip archive")
        return
    fcontents = open(args.file, "rb").read()
    client = get_zoe_client(args)
    application_id = client.application_spark_submit_new(args.user_id, args.worker_count, args.executor_memory, args.executor_cores, args.name, fcontents)
    print("Spark application added with ID: {}".format(application_id))
def run_spark_cmd(args):
    client = get_zoe_client(args)
    application = client.application_get(args.id)
    if application is None:
        print("Error: application {} does not exist".format(args.id))
        return
    ret = client.execution_spark_new(application.id, args.name, args.cmd, args.spark_opts)

    if ret:
        print("Application scheduled successfully, use the app-inspect command to check its status")
    else:
        print("Admission control refused to run the application specified")


def app_rm_cmd(args):
    client = get_zoe_client(args)
    application = client.application_get(args.id)
    if application is None:
        print("Error: application {} does not exist".format(args.id))
        return
    if args.force:
        a = client.application_get(application.id)
        for eid in a.executions:
            e = client.execution_get(eid.id)
            if e.status == "running":
                print("Terminating execution {}".format(e.name))
                client.execution_terminate(e.id)

    client.application_remove(application.id, args.force)


def app_inspect_cmd(args):
    client = get_zoe_client(args)
    application = client.application_get(args.id)
    if application is None:
        print("Error: application {} does not exist".format(args.id))
        return
    print(application)


def app_list_cmd(args):
    client = get_zoe_client(args)
    applications = client.application_list(args.id)
    if len(applications) > 0:
        print("{:4} {:20} {:25}".format("ID", "Name", "Type"))
        for app in applications:
            print("{:4} {:20} {:25}".format(app.id, app.name, app.type))


def exec_kill_cmd(args):
    client = get_zoe_client(args)
    execution = client.execution_get(args.id)
    if execution is None:
        print("Error: execution {} does not exist".format(args.id))
        return
    client.execution_terminate(execution.id)
def log_get_cmd(args):
    client = get_zoe_client(args)
    log = client.log_get(args.id)
    if log is None:
        print("Error: no log found for container ID {}".format(args.id))
        return  # without this, the original fell through and printed None
    print(log)
def gen_config_cmd(args):
    zoeconf().write(open(args.output_file, "w"))


def container_stats_cmd(args):
    client = get_zoe_client(args)
    stats = client.container_stats(args.container_id)
    print(stats)


def process_arguments() -> Namespace:
    global argparser
    argparser = ArgumentParser(description="Zoe - Container Analytics as a Service command-line client")
    argparser.add_argument('-d', '--debug', action='store_true', default=False, help='Enable debug output')
    argparser.add_argument('--ipc-server', default='localhost', help='Address of the Zoe scheduler process')
    argparser.add_argument('--ipc-port', default=8723, type=int, help='Port of the Zoe scheduler process')
    subparser = argparser.add_subparsers(title='subcommands', description='valid subcommands')

    argparser_stats = subparser.add_parser('stats', help="Show the platform statistics")
    argparser_stats.set_defaults(func=stats_cmd)

    argparser_user_new = subparser.add_parser('user-new', help="Create a new user")
    argparser_user_new.add_argument('email', help="User email address")
    argparser_user_new.set_defaults(func=user_new_cmd)

    argparser_user_get = subparser.add_parser('user-get', help="Get the user id for an existing user")
    argparser_user_get.add_argument('email', help="User email address")
    argparser_user_get.set_defaults(func=user_get_cmd)

    argparser_spark_cluster_create = subparser.add_parser('app-spark-cluster-new', help="Set up a new empty Spark cluster")
    argparser_spark_cluster_create.add_argument('--user-id', type=int, required=True, help='Application owner')
    argparser_spark_cluster_create.add_argument('--name', required=True, help='Application name')
    argparser_spark_cluster_create.add_argument('--worker-count', type=int, default=2, help='Number of workers')
    argparser_spark_cluster_create.add_argument('--executor-memory', default='2g', help='Maximum memory available per worker; the system assumes only one executor per worker')
    argparser_spark_cluster_create.add_argument('--executor-cores', default='2', type=int, help='Number of cores to assign to each executor')
    argparser_spark_cluster_create.set_defaults(func=spark_cluster_new_cmd)

    argparser_spark_nb_create = subparser.add_parser('app-spark-notebook-new', help="Set up a new Spark Notebook application")
    argparser_spark_nb_create.add_argument('--user-id', type=int, required=True, help='Notebook owner')
    argparser_spark_nb_create.add_argument('--name', required=True, help='Notebook name')
    argparser_spark_nb_create.add_argument('--worker-count', type=int, default=2, help='Number of workers')
    argparser_spark_nb_create.add_argument('--executor-memory', default='2g', help='Maximum memory available per worker; the system assumes only one executor per worker')
    argparser_spark_nb_create.add_argument('--executor-cores', default='2', type=int, help='Number of cores to assign to each executor')
    argparser_spark_nb_create.set_defaults(func=spark_notebook_new_cmd)

    argparser_spark_submit_create = subparser.add_parser('app-spark-new', help="Set up a new Spark submit application")
    argparser_spark_submit_create.add_argument('--user-id', type=int, required=True, help='Application owner')
    argparser_spark_submit_create.add_argument('--name', required=True, help='Application name')
    argparser_spark_submit_create.add_argument('--worker-count', type=int, default=2, help='Number of workers')
    argparser_spark_submit_create.add_argument('--executor-memory', default='2g', help='Maximum memory available per worker; the system assumes only one executor per worker')
    argparser_spark_submit_create.add_argument('--executor-cores', default='2', type=int, help='Number of cores to assign to each executor')
    argparser_spark_submit_create.add_argument('--file', required=True, help='Zip archive containing the application files')
    argparser_spark_submit_create.set_defaults(func=spark_submit_new_cmd)

    argparser_app_rm = subparser.add_parser('app-rm', help="Delete an application")
    argparser_app_rm.add_argument('id', type=int, help="Application id")
    argparser_app_rm.add_argument('-f', '--force', action="store_true", help="Also kill all active executions, if any")
    argparser_app_rm.set_defaults(func=app_rm_cmd)

    argparser_app_inspect = subparser.add_parser('app-inspect', help="Gather details about an application and its active executions")
    argparser_app_inspect.add_argument('id', type=int, help="Application id")
    argparser_app_inspect.set_defaults(func=app_inspect_cmd)

    argparser_app_ls = subparser.add_parser('app-ls', help="List all applications belonging to a user")
    argparser_app_ls.add_argument('id', type=int, help="User id")
    argparser_app_ls.set_defaults(func=app_list_cmd)

    argparser_spark_app_run = subparser.add_parser('run', help="Execute a previously registered Spark application")
    argparser_spark_app_run.add_argument('id', type=int, help="Application id")
    argparser_spark_app_run.add_argument('--name', required=True, help='Execution name')
    argparser_spark_app_run.add_argument('--cmd', help="Command line to pass to spark-submit")
    argparser_spark_app_run.add_argument('--spark-opts', help="Optional Spark options to pass to spark-submit")
    argparser_spark_app_run.set_defaults(func=run_spark_cmd)

    argparser_execution_kill = subparser.add_parser('execution-kill', help="Terminate an execution")
    argparser_execution_kill.add_argument('id', type=int, help="Execution id")
    argparser_execution_kill.set_defaults(func=exec_kill_cmd)

    argparser_log_get = subparser.add_parser('log-get', help="Retrieve the logs of a running container")
    argparser_log_get.add_argument('id', type=int, help="Container id")
    argparser_log_get.set_defaults(func=log_get_cmd)

    argparser_write_config = subparser.add_parser('write-config', help="Generate a sample file containing the current configuration values")
    argparser_write_config.add_argument('output_file', help="Filename to create with default configuration")
    argparser_write_config.set_defaults(func=gen_config_cmd)

    argparser_container_stats = subparser.add_parser('container-stats', help="Retrieve statistics on a running container")
    argparser_container_stats.add_argument('container_id', help="ID of the container")
    argparser_container_stats.set_defaults(func=container_stats_cmd)

    return argparser.parse_args()


def main():
    args = process_arguments()
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    conf_init()

    try:
        args.func(args)
    except AttributeError:
        argparser.print_help()
        return

from zoe_client.entrypoint import zoe

if __name__ == "__main__":
-    main()
+    zoe()
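# Typical invocations once the entry points are installed (examples only;
# IDs, addresses and names are made up):
#
#   zoe-manage setup-db
#   zoe-scheduler --ipc-server-port 8723
#   zoe-web --ipc-server localhost --ipc-port 8723
#   zoe user-new alice@example.com
#   zoe app-spark-notebook-new --user-id 1 --name my-notebook
#   zoe run 1 --name first-run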
from argparse import ArgumentParser, Namespace
import logging
from zipfile import is_zipfile
from pprint import pprint

from zoe_client import ZoeClient
from common.configuration import init as conf_init, zoeconf

argparser = None


def get_zoe_client(args) -> ZoeClient:
    return ZoeClient(args.ipc_server, args.ipc_port)


def stats_cmd(args):
    client = get_zoe_client(args)
    stats = client.platform_stats()
    pprint(stats)


def user_new_cmd(args):
    client = get_zoe_client(args)
    user = client.user_new(args.email)
    print("New user ID: {}".format(user.id))


def user_get_cmd(args):
    client = get_zoe_client(args)
    user = client.user_get_by_email(args.email)
    print("User ID: {}".format(user.id))


def spark_cluster_new_cmd(args):
    client = get_zoe_client(args)
    application_id = client.application_spark_new(args.user_id, args.worker_count, args.executor_memory, args.executor_cores, args.name)
    print("Spark application added with ID: {}".format(application_id))


def spark_notebook_new_cmd(args):
    client = get_zoe_client(args)
    application_id = client.application_spark_notebook_new(args.user_id, args.worker_count, args.executor_memory, args.executor_cores, args.name)
    print("Spark application added with ID: {}".format(application_id))


def spark_submit_new_cmd(args):
    if not is_zipfile(args.file):
        print("Error: the file specified is not a zip archive")
        return
    fcontents = open(args.file, "rb").read()
    client = get_zoe_client(args)
    application_id = client.application_spark_submit_new(args.user_id, args.worker_count, args.executor_memory, args.executor_cores, args.name, fcontents)
    print("Spark application added with ID: {}".format(application_id))


def run_spark_cmd(args):
    client = get_zoe_client(args)
    application = client.application_get(args.id)
    if application is None:
        print("Error: application {} does not exist".format(args.id))
        return
    ret = client.execution_spark_new(application.id, args.name, args.cmd, args.spark_opts)

    if ret:
        print("Application scheduled successfully, use the app-inspect command to check its status")
    else:
        print("Admission control refused to run the application specified")


def app_rm_cmd(args):
    client = get_zoe_client(args)
    application = client.application_get(args.id)
    if application is None:
        print("Error: application {} does not exist".format(args.id))
        return
    if args.force:
        a = client.application_get(application.id)
        for eid in a.executions:
            e = client.execution_get(eid.id)
            if e.status == "running":
                print("Terminating execution {}".format(e.name))
                client.execution_terminate(e.id)

    client.application_remove(application.id, args.force)


def app_inspect_cmd(args):
    client = get_zoe_client(args)
    application = client.application_get(args.id)
    if application is None:
        print("Error: application {} does not exist".format(args.id))
        return
    print(application)


def app_list_cmd(args):
    client = get_zoe_client(args)
    applications = client.application_list(args.id)
    if len(applications) > 0:
        print("{:4} {:20} {:25}".format("ID", "Name", "Type"))
        for app in applications:
            print("{:4} {:20} {:25}".format(app.id, app.name, app.type))


def exec_kill_cmd(args):
    client = get_zoe_client(args)
    execution = client.execution_get(args.id)
    if execution is None:
        print("Error: execution {} does not exist".format(args.id))
        return
    client.execution_terminate(execution.id)
def log_get_cmd(args):
    client = get_zoe_client(args)
    log = client.log_get(args.id)
    if log is None:
        print("Error: no log found for container ID {}".format(args.id))
        return  # without this, the original fell through and printed None
    print(log)
def gen_config_cmd(args):
    zoeconf().write(open(args.output_file, "w"))


def container_stats_cmd(args):
    client = get_zoe_client(args)
    stats = client.container_stats(args.container_id)
    print(stats)


def process_arguments() -> Namespace:
    global argparser
    argparser = ArgumentParser(description="Zoe - Container Analytics as a Service command-line client")
    argparser.add_argument('-d', '--debug', action='store_true', default=False, help='Enable debug output')
    argparser.add_argument('--ipc-server', default='localhost', help='Address of the Zoe scheduler process')
    argparser.add_argument('--ipc-port', default=8723, type=int, help='Port of the Zoe scheduler process')
    subparser = argparser.add_subparsers(title='subcommands', description='valid subcommands')

    argparser_stats = subparser.add_parser('stats', help="Show the platform statistics")
    argparser_stats.set_defaults(func=stats_cmd)

    argparser_user_new = subparser.add_parser('user-new', help="Create a new user")
    argparser_user_new.add_argument('email', help="User email address")
    argparser_user_new.set_defaults(func=user_new_cmd)

    argparser_user_get = subparser.add_parser('user-get', help="Get the user id for an existing user")
    argparser_user_get.add_argument('email', help="User email address")
    argparser_user_get.set_defaults(func=user_get_cmd)

    argparser_spark_cluster_create = subparser.add_parser('app-spark-cluster-new', help="Set up a new empty Spark cluster")
    argparser_spark_cluster_create.add_argument('--user-id', type=int, required=True, help='Application owner')
    argparser_spark_cluster_create.add_argument('--name', required=True, help='Application name')
    argparser_spark_cluster_create.add_argument('--worker-count', type=int, default=2, help='Number of workers')
    argparser_spark_cluster_create.add_argument('--executor-memory', default='2g', help='Maximum memory available per worker; the system assumes only one executor per worker')
    argparser_spark_cluster_create.add_argument('--executor-cores', default='2', type=int, help='Number of cores to assign to each executor')
    argparser_spark_cluster_create.set_defaults(func=spark_cluster_new_cmd)

    argparser_spark_nb_create = subparser.add_parser('app-spark-notebook-new', help="Set up a new Spark Notebook application")
    argparser_spark_nb_create.add_argument('--user-id', type=int, required=True, help='Notebook owner')
    argparser_spark_nb_create.add_argument('--name', required=True, help='Notebook name')
    argparser_spark_nb_create.add_argument('--worker-count', type=int, default=2, help='Number of workers')
    argparser_spark_nb_create.add_argument('--executor-memory', default='2g', help='Maximum memory available per worker; the system assumes only one executor per worker')
    argparser_spark_nb_create.add_argument('--executor-cores', default='2', type=int, help='Number of cores to assign to each executor')
    argparser_spark_nb_create.set_defaults(func=spark_notebook_new_cmd)

    argparser_spark_submit_create = subparser.add_parser('app-spark-new', help="Set up a new Spark submit application")
    argparser_spark_submit_create.add_argument('--user-id', type=int, required=True, help='Application owner')
    argparser_spark_submit_create.add_argument('--name', required=True, help='Application name')
    argparser_spark_submit_create.add_argument('--worker-count', type=int, default=2, help='Number of workers')
    argparser_spark_submit_create.add_argument('--executor-memory', default='2g', help='Maximum memory available per worker; the system assumes only one executor per worker')
    argparser_spark_submit_create.add_argument('--executor-cores', default='2', type=int, help='Number of cores to assign to each executor')
    argparser_spark_submit_create.add_argument('--file', required=True, help='Zip archive containing the application files')
    argparser_spark_submit_create.set_defaults(func=spark_submit_new_cmd)

    argparser_app_rm = subparser.add_parser('app-rm', help="Delete an application")
    argparser_app_rm.add_argument('id', type=int, help="Application id")
    argparser_app_rm.add_argument('-f', '--force', action="store_true", help="Also kill all active executions, if any")
    argparser_app_rm.set_defaults(func=app_rm_cmd)

    argparser_app_inspect = subparser.add_parser('app-inspect', help="Gather details about an application and its active executions")
    argparser_app_inspect.add_argument('id', type=int, help="Application id")
    argparser_app_inspect.set_defaults(func=app_inspect_cmd)

    argparser_app_ls = subparser.add_parser('app-ls', help="List all applications belonging to a user")
    argparser_app_ls.add_argument('id', type=int, help="User id")
    argparser_app_ls.set_defaults(func=app_list_cmd)

    argparser_spark_app_run = subparser.add_parser('run', help="Execute a previously registered Spark application")
    argparser_spark_app_run.add_argument('id', type=int, help="Application id")
    argparser_spark_app_run.add_argument('--name', required=True, help='Execution name')
    argparser_spark_app_run.add_argument('--cmd', help="Command line to pass to spark-submit")
    argparser_spark_app_run.add_argument('--spark-opts', help="Optional Spark options to pass to spark-submit")
    argparser_spark_app_run.set_defaults(func=run_spark_cmd)

    argparser_execution_kill = subparser.add_parser('execution-kill', help="Terminate an execution")
    argparser_execution_kill.add_argument('id', type=int, help="Execution id")
    argparser_execution_kill.set_defaults(func=exec_kill_cmd)

    argparser_log_get = subparser.add_parser('log-get', help="Retrieve the logs of a running container")
    argparser_log_get.add_argument('id', type=int, help="Container id")
    argparser_log_get.set_defaults(func=log_get_cmd)

    argparser_write_config = subparser.add_parser('write-config', help="Generate a sample file containing the current configuration values")
    argparser_write_config.add_argument('output_file', help="Filename to create with default configuration")
    argparser_write_config.set_defaults(func=gen_config_cmd)

    argparser_container_stats = subparser.add_parser('container-stats', help="Retrieve statistics on a running container")
    argparser_container_stats.add_argument('container_id', help="ID of the container")