Commit 03363c15 authored by Daniele Venzano

Use a configuration file

parent 0d10a66e
...@@ -59,4 +59,4 @@ docs/_build/
# PyBuilder
target/
.idea/
-smtp_pass.txt
+zoe.conf
...@@ -4,5 +4,4 @@ python:
install:
  - pip install --allow-all-external -r requirements.txt
  - pip install nose
-  - touch smtp_pass.txt
script: nosetests
-conf = {
-    'docker_swarm_manager': 'tcp://bf1.bigfoot.eurecom.fr:2380',
-    'status_refresh_interval': 10,
-    'scheduler_task_interval': 10,
-    'db_connection': 'mysql+mysqlconnector://zoe:6sz2tfPuzBcCLdEz@m1.bigfoot.eurecom.fr/zoe',
-    'redis_server': '192.168.45.2',
-    'redis_port': '6379',
-    'redis_db': 0,
-    'apache-proxy-config-file': '/tmp/zoe-proxy.conf',
-    'apache-log-file': '/var/log/apache2/access.log',
-    'proxy_update_accesses': 300,
-    'check_health': 30,
-    'notebook_max_age_no_activity': 24,
-    'notebook_warning_age_no_activity': 2,
-    'email_task_interval': 300,
+from configparser import ConfigParser
+
+rpycconf = {
    'client_rpyc_autodiscovery': True,
    'client_rpyc_server': None,
    'client_rpyc_port': None,
-    'proxy_path_prefix': '/proxy',
-    'smtp_server': 'smtp.gmail.com',
-    'smtp_user': 'bigfoot.data@gmail.com',
-    'smtp_pass': open('smtp_pass.txt', 'r').read().strip(),
-    'web_server_name': 'bigfoot-m2.eurecom.fr',
-    'history_path': "/var/lib/zoe/history"
}
+config_paths = [
+    'zoe.conf',
+    '/etc/zoe/zoe.conf'
+]
+
+defaults = {
+    'docker': {
+        'swarm_manager_url': 'tcp://swarm.example.com:2380'
+    },
+    'intervals': {
+        'status_refresh': 10,
+        'scheduler_task': 10,
+        'proxy_update_accesses': 300,
+        'check_health': 30,
+        'notebook_max_age_no_activity': 24,
+        'notebook_warning_age_no_activity': 2
+    },
+    'db': {
+        'url': 'mysql+mysqlconnector://zoe:pass@dbhost/zoe'
+    },
+    'apache': {
+        'proxy_config_file': '/tmp/zoe-proxy.conf',
+        'access_log': '/var/log/apache2/access.log',
+        'web_server_name': 'bigfoot-m2.eurecom.fr',
+        'proxy_path_prefix': '/proxy'
+    },
+    'smtp': {
+        'server': 'smtp.example.com',
+        'user': 'zoe@example.com',
+        'password': 'changeme'
+    },
+    'filesystem': {
+        'history_path': "/var/lib/zoe/history"
+    },
+    'flask': {
+        'secret_key': b"\xc3\xb0\xa7\xff\x8fH'\xf7m\x1c\xa2\x92F\x1d\xdcz\x05\xe6CJN5\x83!"
+    }
+}
+class ZoeConfig(ConfigParser):
+    def __init__(self):
+        super().__init__(interpolation=None)
+        self.read_dict(defaults)
+
+    def write_defaults(self, fp):
+        tmp = ZoeConfig()
+        tmp.write(fp)
+
+    @property
+    def history_path(self) -> str:
+        return self.get('filesystem', 'history_path')
+
+    @property
+    def web_server_name(self) -> str:
+        return self.get('apache', 'web_server_name')
+
+    @property
+    def proxy_path_url_prefix(self) -> str:
+        return self.get('apache', 'proxy_path_prefix')
+
+    @property
+    def smtp_server(self) -> str:
+        return self.get('smtp', 'server')
+
+    @property
+    def smtp_user(self) -> str:
+        return self.get('smtp', 'user')
+
+    @property
+    def smtp_password(self) -> str:
+        return self.get('smtp', 'password')
+
+    @property
+    def notebook_warning_age_no_activity(self) -> int:
+        return self.getint('intervals', 'notebook_warning_age_no_activity')
+
+    @property
+    def notebook_max_age_no_activity(self) -> int:
+        return self.getint('intervals', 'notebook_max_age_no_activity')
+
+    @property
+    def interval_check_health(self) -> int:
+        return self.getint('intervals', 'check_health')
+
+    @property
+    def interval_proxy_update_accesses(self) -> int:
+        return self.getint('intervals', 'proxy_update_accesses')
+
+    @property
+    def apache_log_file(self) -> str:
+        return self.get('apache', 'access_log')
+
+    @property
+    def apache_proxy_config_file(self) -> str:
+        return self.get('apache', 'proxy_config_file')
+
+    @property
+    def db_url(self) -> str:
+        return self.get('db', 'url')
+
+    @property
+    def interval_scheduler_task(self) -> int:
+        return self.getint('intervals', 'scheduler_task')
+
+    @property
+    def interval_status_refresh(self) -> int:
+        return self.getint('intervals', 'status_refresh')
+
+    @property
+    def docker_swarm_manager(self) -> str:
+        return self.get('docker', 'swarm_manager_url')
+
+    @property
+    def cookies_secret_key(self):
+        return self.get('flask', 'secret_key')
+
+
+zoeconf = ZoeConfig()
+zoeconf.read(config_paths)
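For orientation, this is roughly what a generated zoe.conf looks like: ConfigParser emits one [section] per top-level key of the defaults dict above, with the keys of the nested dicts as options. The values below are the placeholder defaults, not a working deployment, and the [flask] secret_key line is elided. Note that zoeconf.read(config_paths) processes the files in list order, so settings found in /etc/zoe/zoe.conf override a zoe.conf in the working directory.

[docker]
swarm_manager_url = tcp://swarm.example.com:2380

[intervals]
status_refresh = 10
scheduler_task = 10
proxy_update_accesses = 300
check_health = 30
notebook_max_age_no_activity = 24
notebook_warning_age_no_activity = 2

[db]
url = mysql+mysqlconnector://zoe:pass@dbhost/zoe

[apache]
proxy_config_file = /tmp/zoe-proxy.conf
access_log = /var/log/apache2/access.log
web_server_name = bigfoot-m2.eurecom.fr
proxy_path_prefix = /proxy

[smtp]
server = smtp.example.com
user = zoe@example.com
password = changeme

[filesystem]
history_path = /var/lib/zoe/history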
...@@ -2,24 +2,24 @@ import os
import logging

from common.state import Application, Execution
-from common.configuration import conf
+from common.configuration import zoeconf

log = logging.getLogger(__name__)


def application_data_upload(application: Application, data: bytes) -> bool:
-    fpath = os.path.join(conf['history_path'], 'apps', 'app-{}.zip'.format(application.id))
+    fpath = os.path.join(zoeconf.history_path, 'apps', 'app-{}.zip'.format(application.id))
    open(fpath, "wb").write(data)


def application_data_download(application: Application) -> bytes:
-    fpath = os.path.join(conf['history_path'], 'apps', 'app-{}.zip'.format(application.id))
+    fpath = os.path.join(zoeconf.history_path, 'apps', 'app-{}.zip'.format(application.id))
    data = open(fpath, "rb").read()
    return data


def application_data_delete(application: Application):
-    fpath = os.path.join(conf['history_path'], 'apps', 'app-{}.zip'.format(application.id))
+    fpath = os.path.join(zoeconf.history_path, 'apps', 'app-{}.zip'.format(application.id))
    try:
        os.unlink(fpath)
    except OSError:
...@@ -27,18 +27,18 @@ def application_data_delete(application: Application):

def logs_archive_upload(execution: Execution, data: bytes) -> bool:
-    fpath = os.path.join(conf['history_path'], 'logs', 'log-{}.zip'.format(execution.id))
+    fpath = os.path.join(zoeconf.history_path, 'logs', 'log-{}.zip'.format(execution.id))
    open(fpath, "wb").write(data)


def logs_archive_download(execution: Execution) -> bytes:
-    fpath = os.path.join(conf['history_path'], 'logs', 'log-{}.zip'.format(execution.id))
+    fpath = os.path.join(zoeconf.history_path, 'logs', 'log-{}.zip'.format(execution.id))
    data = open(fpath, "rb").read()
    return data


def logs_archive_delete(execution: Execution):
-    fpath = os.path.join(conf['history_path'], 'logs', 'log-{}.zip'.format(execution.id))
+    fpath = os.path.join(zoeconf.history_path, 'logs', 'log-{}.zip'.format(execution.id))
    try:
        os.unlink(fpath)
    except OSError:
...
...@@ -2,11 +2,11 @@ from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

-from common.configuration import conf
+from common.configuration import zoeconf

Base = declarative_base()

-_engine = create_engine(conf["db_connection"], echo=False)
+_engine = create_engine(zoeconf.db_url, echo=False)
AlchemySession = sessionmaker(bind=_engine)

from common.state.container import Container
...
from common.state.execution import Execution
from common.state import Proxy, AlchemySession, Application
-from common.configuration import conf
+from common.configuration import zoeconf


def generate_log_history_url(execution: Execution) -> str:
    zoe_web_log_history_path = '/api/history/logs/'
-    return 'http://' + conf['web_server_name'] + zoe_web_log_history_path + str(execution.id)
+    return 'http://' + zoeconf.web_server_name + zoe_web_log_history_path + str(execution.id)


def generate_notebook_url(execution: Execution) -> str:
    state = AlchemySession()
    c = execution.find_container("spark-notebook")
    pr = state.query(Proxy).filter_by(container_id=c.id, service_name="Spark Notebook interface").one()
-    return 'http://' + conf['web_server_name'] + conf['proxy_path_prefix'] + '/{}'.format(pr.id)
+    return 'http://' + zoeconf.web_server_name + zoeconf.proxy_path_url_prefix + '/{}'.format(pr.id)


def generate_application_binary_url(application: Application) -> str:
-    return 'http://' + conf['web_server_name'] + '/api/applications/download/{}'.format(application.id)
+    return 'http://' + zoeconf.web_server_name + '/api/applications/download/{}'.format(application.id)
...@@ -4,11 +4,11 @@ import argparse
import logging

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
-from tornado.ioloop import IOLoop, PeriodicCallback
+from tornado.ioloop import IOLoop

from zoe_web import app
-from common.configuration import conf
+from common.configuration import rpycconf

log = logging.getLogger("zoe_web")
...@@ -32,11 +32,11 @@ def main():
    logging.getLogger("tornado").setLevel(logging.WARNING)

    if args.rpyc_server is None:
-        conf['client_rpyc_autodiscovery'] = True
+        rpycconf['client_rpyc_autodiscovery'] = True
    else:
-        conf['client_rpyc_autodiscovery'] = False
-        conf['client_rpyc_server'] = args.rpyc_server
-        conf['client_rpyc_port'] = args.rpyc_port
+        rpycconf['client_rpyc_autodiscovery'] = False
+        rpycconf['client_rpyc_server'] = args.rpyc_server
+        rpycconf['client_rpyc_port'] = args.rpyc_port

    log.info("Starting HTTP server...")
    app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
...
...@@ -11,7 +11,7 @@ from common.application_resources import SparkApplicationResources
from common.status import PlatformStatusReport, ApplicationStatusReport
from common.exceptions import UserIDDoesNotExist, ApplicationStillRunning
import common.object_storage as storage
-from common.configuration import conf
+from common.configuration import zoeconf, rpycconf

REGISTRY = "10.1.0.1:5000"
MASTER_IMAGE = REGISTRY + "/zoe/spark-master-1.4.1:1.3"
...@@ -232,11 +232,11 @@ class ZoeClient:
        if isinstance(execution.application, SparkNotebookApplication):
            c = execution.find_container("spark-notebook")
            pr = self.state.query(Proxy).filter_by(container_id=c.id, service_name="Spark Notebook interface").one()
-            return conf['proxy_path_prefix'] + '/{}'.format(pr.id)
+            return zoeconf.proxy_path_url_prefix + '/{}'.format(pr.id)
        elif isinstance(execution.application, SparkSubmitApplication):
            c = execution.find_container("spark-submit")
            pr = self.state.query(Proxy).filter_by(container_id=c.id, service_name="Spark application web interface").one()
-            return conf['proxy_path_prefix'] + '/{}'.format(pr.id)
+            return zoeconf.proxy_path_url_prefix + '/{}'.format(pr.id)
        else:
            return None
...@@ -280,7 +280,7 @@ class ZoeClient:

def get_zoe_client():
-    if conf['client_rpyc_autodiscovery']:
+    if rpycconf['client_rpyc_autodiscovery']:
        return ZoeClient()
    else:
-        return ZoeClient(conf['client_rpyc_server'], conf['client_rpyc_port'])
+        return ZoeClient(rpycconf['client_rpyc_server'], rpycconf['client_rpyc_port'])
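For context, a minimal sketch of how the split between file-backed settings (zoeconf) and runtime RPyC client settings (rpycconf) plays out from a caller's point of view after this change; the module paths come from the imports above, while the host name and port are purely illustrative:

from common.configuration import rpycconf
from zoe_client import get_zoe_client

# File-backed settings are read through zoeconf properties; the RPyC client
# settings stay in a plain dict that entry points fill from CLI arguments.
rpycconf['client_rpyc_autodiscovery'] = False
rpycconf['client_rpyc_server'] = 'scheduler.example.com'  # hypothetical host
rpycconf['client_rpyc_port'] = 4000                       # hypothetical port

client = get_zoe_client()  # returns ZoeClient(server, port) when autodiscovery is off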
-from datetime import datetime, timedelta
import smtplib
from email.mime.text import MIMEText
import logging
...@@ -7,7 +6,7 @@ from jinja2 import Template

from common.status import SparkSubmitExecution, Execution
from common.urls import generate_log_history_url, generate_notebook_url
-from common.configuration import conf
+from common.configuration import zoeconf

log = logging.getLogger(__name__)
...@@ -71,8 +70,8 @@ def notify_notebook_notice(execution: Execution):
    subject = "[Zoe] Notebook termination warning"
    template_vars = {
-        'grace_time': conf['notebook_max_age_no_activity'] - conf['notebook_warning_age_no_activity'],
-        'wrn_age': conf['notebook_warning_age_no_activity'],
+        'grace_time': zoeconf.notebook_max_age_no_activity - zoeconf.notebook_warning_age_no_activity,
+        'wrn_age': zoeconf.notebook_warning_age_no_activity,
        'nb_url': generate_notebook_url(execution)
    }
    send_email(email, subject, NOTEBOOK_WARNING_EMAIL_TEMPLATE, template_vars)
...@@ -83,7 +82,7 @@ def notify_notebook_termination(execution: Execution):
    email = app.user.email
    subject = "[Zoe] Notebook terminated"
-    template_vars = {'max_age': conf['notebook_max_age_no_activity']}
+    template_vars = {'max_age': zoeconf.notebook_max_age_no_activity}
    send_email(email, subject, NOTEBOOK_KILLED_EMAIL_TEMPLATE, template_vars)
...@@ -94,9 +93,9 @@ def send_email(address, subject, template, template_vars):
    msg['Subject'] = subject
    msg['From'] = 'noreply@bigfoot.eurecom.fr'
    msg['To'] = address
-    s = smtplib.SMTP(conf['smtp_server'])
+    s = smtplib.SMTP(zoeconf.smtp_server)
    s.ehlo()
    s.starttls()
-    s.login(conf['smtp_user'], conf['smtp_pass'])
+    s.login(zoeconf.smtp_user, zoeconf.smtp_password)
    s.send_message(msg)
    s.quit()
...@@ -11,7 +11,7 @@ from zoe_scheduler.emails import notify_execution_finished, notify_notebook_noti
from common.state import AlchemySession, Cluster, Container, SparkApplication, Proxy, Execution, SparkNotebookApplication, SparkSubmitApplication, SparkSubmitExecution
from common.application_resources import ApplicationResources
from common.exceptions import CannotCreateCluster
-from common.configuration import conf
+from common.configuration import zoeconf
from common.object_storage import logs_archive_upload
from common.urls import generate_application_binary_url
...@@ -209,11 +209,11 @@ class PlatformManager:
            c = e.find_container("spark-notebook")
            if c is not None:
                pr = state.query(Proxy).filter_by(container_id=c.id, service_name="Spark Notebook interface").one()
-                if datetime.now() - pr.last_access > timedelta(hours=conf["notebook_max_age_no_activity"]):
+                if datetime.now() - pr.last_access > timedelta(hours=zoeconf.notebook_max_age_no_activity):
                    log.info("Killing spark notebook {} for inactivity".format(e.id))
                    self.execution_terminate(state, e)
                    notify_notebook_termination(e)
-                if datetime.now() - pr.last_access > timedelta(hours=conf["notebook_max_age_no_activity"]) - timedelta(hours=conf["notebook_warning_age_no_activity"]):
+                if datetime.now() - pr.last_access > timedelta(hours=zoeconf.notebook_max_age_no_activity) - timedelta(hours=zoeconf.notebook_warning_age_no_activity):
                    log.info("Spark notebook {} is on notice for inactivity".format(e.id))
                    e.termination_notice = True
                    notify_notebook_notice(e)
...
...@@ -3,13 +3,14 @@ from urllib.parse import urlparse
import re
from datetime import datetime
import logging
-log = logging.getLogger(__name__)

from jinja2 import Template

-from common.configuration import conf
+from common.configuration import zoeconf
from common.state import AlchemySession, Proxy

+log = logging.getLogger(__name__)

LOOP_INTERVAL = 1 # seconds
ACCESS_TIME_REFRESH_INTERVAL = 60 # seconds
...@@ -39,8 +40,8 @@ ENTRY_TEMPLATE = """

class ProxyManager:
    def __init__(self):
-        self.apache_conf_filepath = conf["apache-proxy-config-file"]
-        self.apache_access_log = conf["apache-log-file"]
+        self.apache_conf_filepath = zoeconf.apache_proxy_config_file
+        self.apache_access_log = zoeconf.apache_log_file

    def _get_proxy_entries(self):
        state = AlchemySession()
...
...@@ -5,7 +5,7 @@ from zoe_scheduler.platform_status import PlatformStatus
from zoe_scheduler.periodic_tasks import PeriodicTaskManager
from zoe_scheduler.proxy_manager import pm

-from common.configuration import conf
+from common.configuration import zoeconf
from common.state import Execution
from common.application_resources import ApplicationResources
...@@ -66,10 +66,10 @@ class ZoeScheduler:
        self.scheduler_policy = SimpleSchedulerPolicy(self.platform_status)

    def init_tasks(self, tm: PeriodicTaskManager):
-        tm.add_task("platform status updater", self.platform_status.update, conf["status_refresh_interval"])
-        tm.add_task("scheduler", self.schedule, conf['scheduler_task_interval'])
-        tm.add_task("proxy access timestamp updater", pm.update_proxy_access_timestamps, conf['proxy_update_accesses'])
-        tm.add_task("execution health checker", self.platform.check_executions_health, conf["check_health"])
+        tm.add_task("platform status updater", self.platform_status.update, zoeconf.interval_status_refresh)
+        tm.add_task("scheduler", self.schedule, zoeconf.interval_scheduler_task)
+        tm.add_task("proxy access timestamp updater", pm.update_proxy_access_timestamps, zoeconf.interval_proxy_update_accesses)
+        tm.add_task("execution health checker", self.platform.check_executions_health, zoeconf.interval_check_health)

    def incoming(self, execution: Execution) -> bool:
        if not self.scheduler_policy.admission_control(execution.application.required_resources):
...
import time
import logging
-log = logging.getLogger(__name__)

import docker
import docker.utils
import docker.errors

-from common.configuration import conf
+from common.configuration import zoeconf
from zoe_scheduler.swarm_status import SwarmStatus, SwarmNodeStatus

+log = logging.getLogger(__name__)


class SwarmClient:
    def __init__(self):
-        manager = conf['docker_swarm_manager']
+        manager = zoeconf.docker_swarm_manager
        self.cli = docker.Client(base_url=manager)

    def info(self) -> SwarmStatus:
...
-from datetime import datetime
-from flask import Flask, url_for
+from flask import Flask

from zoe_web.api import api_bp
from zoe_web.web import web_bp

+from common.configuration import zoeconf

app = Flask(__name__, static_url_path='/does-not-exist')
app.register_blueprint(web_bp, url_prefix='')
app.register_blueprint(api_bp, url_prefix='/api')

-app.secret_key = b"\xc3\xb0\xa7\xff\x8fH'\xf7m\x1c\xa2\x92F\x1d\xdcz\x05\xe6CJN5\x83!"
+app.secret_key = zoeconf.cookies_secret_key
...@@ -2,7 +2,6 @@ from flask import render_template, redirect, url_for, abort
from zoe_web import app
from zoe_client import ZoeClient
-from common.configuration import conf


@app.route("/web/<int:user_id>/cluster/<int:app_id>/inspect")
...
...@@ -6,7 +6,7 @@ from zipfile import is_zipfile
from zoe_client import get_zoe_client
from common.state import create_tables
-from common.configuration import conf
+from common.configuration import zoeconf, rpycconf

argparser = None
...@@ -122,6 +122,10 @@ def log_get_cmd(args):
    print(log)


+def gen_config_cmd(args):
+    zoeconf.write(open(args.output_file, "w"))
+
+
def process_arguments() -> Namespace:
    global argparser
    argparser = ArgumentParser(description="Zoe - Container Analytics as a Service command-line client")
...@@ -197,6 +201,10 @@ def process_arguments() -> Namespace:
    argparser_log_get.add_argument('id', type=int, help="Container id")
    argparser_log_get.set_defaults(func=log_get_cmd)

+    argparser_log_get = subparser.add_parser('write-config', help="Generates a sample file containing current configuration values")
+    argparser_log_get.add_argument('output_file', help="Filename to create with default configuration")
+    argparser_log_get.set_defaults(func=gen_config_cmd)
+
    return argparser.parse_args()
...@@ -208,11 +216,11 @@ def main():
    logging.basicConfig(level=logging.INFO)

    if args.rpyc_server is None:
-        conf['client_rpyc_autodiscovery'] = True
+        rpycconf['client_rpyc_autodiscovery'] = True
    else:
-        conf['client_rpyc_autodiscovery'] = False
-        conf['client_rpyc_server'] = args.rpyc_server
-        conf['client_rpyc_port'] = args.rpyc_port
+        rpycconf['client_rpyc_autodiscovery'] = False
+        rpycconf['client_rpyc_server'] = args.rpyc_server
+        rpycconf['client_rpyc_port'] = args.rpyc_port

    try:
        args.func(args)
...