Commit 018a3f87 authored by Daniele Venzano

Add support for SparkSubmit jobs, to be tested

Ignore IDEA project files
parent e96e1d18
@@ -58,7 +58,3 @@ docs/_build/
# PyBuilder
target/
caaas.ini
\ No newline at end of file
caaas
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="BashSupportProjectSettings">
<option name="supportBash4" value="true" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Remote Python 3.4.0 (sftp://ubuntu@192.168.45.25:22/usr/bin/python3)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="TemplatesService">
<option name="TEMPLATE_CONFIGURATION" value="Jinja2" />
<option name="TEMPLATE_FOLDERS">
<list>
<option value="$MODULE_DIR$/caaas/templates" />
</list>
</option>
</component>
</module>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectCodeStyleSettingsManager">
<option name="PER_PROJECT_SETTINGS">
<value>
<XML>
<option name="XML_LEGACY_SETTINGS_IMPORTED" value="true" />
</XML>
</value>
</option>
<option name="PREFERRED_PROJECT_CODE_STYLE" value="Default (1)" />
</component>
</project>
\ No newline at end of file
......
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="dataSourceStorageLocal">
<data-source name="MySQL - @m1" uuid="a32fd6de-3ffa-40c0-9ec8-8953a89c53e0">
<secret-storage>master_key</secret-storage>
<user-name>caaas</user-name>
<schema-pattern>caaas.*</schema-pattern>
<default-schemas>caaas.*</default-schemas>
</data-source>
<data-source name="MySQL - @m1 devel" uuid="33b1ec79-4374-4ff8-a8f8-26c89b418a79">
<secret-storage>master_key</secret-storage>
<user-name>caaas_devel</user-name>
<schema-pattern>caaas_devel.*</schema-pattern>
<default-schemas>caaas_devel.*</default-schemas>
</data-source>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="DataSourceManagerImpl" format="xml" hash="330654499">
<data-source source="LOCAL" name="MySQL - @m1" uuid="a32fd6de-3ffa-40c0-9ec8-8953a89c53e0">
<driver-ref>mysql</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>com.mysql.jdbc.Driver</jdbc-driver>
<jdbc-url>jdbc:mysql://localhost:3306</jdbc-url>
<driver-properties>
<property name="zeroDateTimeBehavior" value="convertToNull" />
<property name="tinyInt1isBit" value="false" />
<property name="characterEncoding" value="utf8" />
<property name="characterSetResults" value="utf8" />
<property name="yearIsDateType" value="false" />
</driver-properties>
<libraries />
</data-source>
<data-source source="LOCAL" name="MySQL - @m1 devel" uuid="33b1ec79-4374-4ff8-a8f8-26c89b418a79">
<driver-ref>mysql</driver-ref>
<synchronize>true</synchronize>
<jdbc-driver>com.mysql.jdbc.Driver</jdbc-driver>
<jdbc-url>jdbc:mysql://localhost:3306</jdbc-url>
<driver-properties>
<property name="zeroDateTimeBehavior" value="convertToNull" />
<property name="tinyInt1isBit" value="false" />
<property name="characterEncoding" value="utf8" />
<property name="characterSetResults" value="utf8" />
<property name="yearIsDateType" value="false" />
</driver-properties>
<libraries />
</data-source>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PublishConfigData" autoUpload="Always" serverName="vm" deleteMissingItems="true" traceLevel="DETAILS">
<serverData>
<paths name="bfm2">
<serverdata>
<mappings>
<mapping deploy="/caaas" local="$PROJECT_DIR$" web="/" />
</mappings>
</serverdata>
</paths>
<paths name="vm">
<serverdata>
<mappings>
<mapping deploy="/caaas" local="$PROJECT_DIR$" web="/" />
</mappings>
</serverdata>
</paths>
</serverData>
<option name="myAutoUpload" value="ALWAYS" />
</component>
</project>
\ No newline at end of file
<component name="ProjectDictionaryState">
<dictionary name="venzano">
<words>
<w>caaas</w>
<w>jinja</w>
<w>venza</w>
</words>
</dictionary>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="Encoding">
<file url="file://$PROJECT_DIR$/caaas_web/sql.py" charset="UTF-8" />
<file url="file://$PROJECT_DIR$/scripts/images/notebook/files/start-notebook.sh" charset="UTF-8" />
<file url="PROJECT" charset="UTF-8" />
</component>
</project>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyPackageRequirementsInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredPackages">
<value>
<list size="1">
<item index="0" class="java.lang.String" itemvalue="mysql" />
</list>
</value>
</option>
</inspection_tool>
<inspection_tool class="SpellCheckingInspection" enabled="true" level="TYPO" enabled_by_default="true">
<option name="processCode" value="false" />
<option name="processLiterals" value="true" />
<option name="processComments" value="true" />
</inspection_tool>
</profile>
</component>
\ No newline at end of file
<component name="InspectionProjectProfileManager">
<settings>
<option name="PROJECT_PROFILE" value="Project Default" />
<option name="USE_PROJECT_PROFILE" value="true" />
<version value="1.0" />
</settings>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectLevelVcsManager" settingsEditedManually="false">
<OptionsSetting value="true" id="Add" />
<OptionsSetting value="true" id="Remove" />
<OptionsSetting value="true" id="Checkout" />
<OptionsSetting value="true" id="Update" />
<OptionsSetting value="true" id="Status" />
<OptionsSetting value="true" id="Edit" />
<ConfirmationsSetting value="0" id="Add" />
<ConfirmationsSetting value="0" id="Remove" />
</component>
<component name="ProjectRootManager" version="2" project-jdk-name="Remote Python 3.4.0 (sftp://ubuntu@192.168.45.25:22/usr/bin/python3)" project-jdk-type="Python SDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/caaas.iml" filepath="$PROJECT_DIR$/.idea/caaas.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SqlDialectMappings">
<file url="file://$PROJECT_DIR$" dialect="MySQL" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="WebResourcesPaths">
<contentEntries>
<entry url="file://$PROJECT_DIR$">
<entryData>
<resourceRoots>
<path value="file://$PROJECT_DIR$/caaas_web" />
</resourceRoots>
</entryData>
</entry>
</contentEntries>
</component>
</project>
\ No newline at end of file
......
# CAaaS - Container Analytics as a Service
# Zoe - Container Analytics as a Service
This web application uses a Docker Swarm cluster to run on-demand Spark clusters.
This application uses a Docker Swarm cluster to run on-demand Spark clusters.
It is composed of three components (a brief interaction sketch follows the list):
* zoectl: command-line client
* zoe-scheduler: the main daemon that performs application scheduling and talks to Swarm
* zoe-web: the web service
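A minimal sketch of how these pieces interact, based on the ZoeClient and RPyC scheduler code later in this commit; it assumes a running scheduler that has registered itself with the RPyC registry:
```
import rpyc

# zoe-scheduler starts an RPyC server with auto_register=True, so a client
# can discover it by service name through the RPyC registry.
conn = rpyc.connect_by_service("ZoeSchedulerRPC")
print(conn.root.get_platform_status())  # returns a PlatformStatusReport
```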
## Requirements
@@ -34,7 +40,7 @@ ProxyHTMLEvents onclick ondblclick onmousedown onmouseup \
onunload onsubmit onreset onselect onchange
ProxyRequests Off
IncludeOptional /tmp/caaas-proxy.conf*
IncludeOptional /tmp/zoe-proxy.conf*
```
If you need to proxy the web application itself, also add these directives:
......
from hashlib import md5
from os import system
from time import sleep
from urllib.parse import urlparse
import re
from datetime import datetime
from jinja2 import Template
from caaas_web.sql import CAaaState
from caaas_web.config_parser import config
LOOP_INTERVAL = 1 # seconds
ACCESS_TIME_REFRESH_INTERVAL = 60 # seconds
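# Apache config fragment, rendered once per proxied service; notebook
# services additionally get a websocket <Location> entry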
ENTRY_TEMPLATE = """
# CAaaS proxy entry for service {{ service_name }}
<Location /proxy/{{ proxy_id }}>
ProxyHtmlEnable On
ProxyHTMLExtended On
ProxyPass {{ proxy_url }} retry=1
ProxyPassReverse {{ proxy_url }}
{% if service_name != "notebook" %}
ProxyHTMLURLMap ^/(.*)$ /proxy/{{ proxy_id }}/$1 RL
ProxyHTMLURLMap ^logPage(.*)$ /proxy/{{ proxy_id }}/logPage$1 RL
ProxyHTMLURLMap ^app(.*)$ /proxy/{{ proxy_id }}/app$1 RL
{% for node in nodes %}
ProxyHTMLURLMap ^http://{{ node[0] }}(.*)$ /proxy/{{node[1]}}$1 RL
{% endfor %}
{% endif %}
</Location>
{% if service_name == "notebook" %}
<Location /proxy/{{ proxy_id }}/ws/>
ProxyPass ws://{{ netloc }}/proxy/{{ proxy_id }}/ws/
</Location>
{% endif %}
"""
def get_proxy_entries():
db = CAaaState()
return db.get_proxies()
def generate_file(proxy_entries):
output = ""
jinja_template = Template(ENTRY_TEMPLATE)
node_list = []
for p in proxy_entries:
netloc = urlparse(p["internal_url"])[1]
node_list.append((netloc, p["id"]))
for p in proxy_entries:
netloc = urlparse(p["internal_url"])[1]
jinja_dict = {
"proxy_id": p["id"],
"proxy_url": p["internal_url"],
"service_name": p["service_name"],
"netloc": netloc,
"nodes": node_list
}
apache_entry = jinja_template.render(jinja_dict)
output += apache_entry + "\n"
return output
def check_difference(generated_file):
    # Compare MD5 digests of the new config and the one currently on disk
    m_new = md5()
    m_new.update(generated_file.encode('ascii'))
    m_old = md5()
    try:
        with open(config.proxy_apache_config) as f:
            m_old.update(f.read().encode('ascii'))
    except FileNotFoundError:
        return True
    return m_new.digest() != m_old.digest()
def commit_and_reload(generated_file):
    print("Apache config requires an update, committing and reloading")
    with open(config.proxy_apache_config, "w") as f:
        f.write(generated_file)
    system("sudo service apache2 reload")
def update_proxy():
entries = get_proxy_entries()
output = generate_file(entries)
if check_difference(output):
commit_and_reload(output)
def update_proxy_access_timestamps():
    regex = re.compile(r'[0-9.]+ - - \[(.*)\] "GET /proxy/([0-9a-z\-]+)/')
    last_accesses = {}
    with open(config.proxy_apache_access_log, 'r') as log:
        for line in log:
            match = regex.match(line)
            if match is not None:
                proxy_id = match.group(2)
                timestamp = datetime.strptime(match.group(1), "%d/%b/%Y:%H:%M:%S %z")
                last_accesses[proxy_id] = timestamp
state = CAaaState()
for proxy in state.get_proxies():
proxy_id = proxy['id']
if proxy_id in last_accesses:
state.update_proxy_access(proxy_id, last_accesses[proxy_id])
if __name__ == "__main__":
print("CAaaS Apache proxy synchronization starting")
access_time_refresh_delay = ACCESS_TIME_REFRESH_INTERVAL
while True:
# print("Checking proxy entries...")
update_proxy()
# print("Checking for completed applications to clean up")
sleep(LOOP_INTERVAL)
access_time_refresh_delay -= LOOP_INTERVAL
if access_time_refresh_delay <= 0:
update_proxy_access_timestamps()
access_time_refresh_delay = ACCESS_TIME_REFRESH_INTERVAL
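For illustration, generate_file() above turns a list of proxy-entry dictionaries into the Apache fragment; the entry values here are made up:
```
# Made-up proxy entry: a notebook service reachable at an internal URL.
entries = [{
    "id": "0f3a",
    "internal_url": "http://10.0.0.3:8888",
    "service_name": "notebook",
}]
print(generate_file(entries))  # one <Location> block plus the websocket entry
```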
from caaas_client.client import CAaaSClient
@@ -10,7 +10,10 @@ class SparkApplicationResources(ApplicationResources):
def __init__(self):
self.master_resources = {}
self.worker_resources = {}
self.notebook_resources = {}
self.client_resources = {}
self.worker_count = 0
self.container_count = 0
def core_count(self) -> int:
if "cores" in self.worker_resources:
......
@@ -2,5 +2,11 @@ conf = {
'docker_swarm_manager': 'tcp://m2:2380',
'status_refresh_interval': 10,
'scheduler_task_interval': 10,
'db_connection': 'mysql+mysqlconnector://caaas_devel:JrJZp8NRY6GzauHj@m1.bigfoot.eurecom.fr/caaas_devel'
'db_connection': 'mysql+mysqlconnector://zoe:6sz2tfPuzBcCLdEz@m1.bigfoot.eurecom.fr/zoe',
'redis_server': '192.168.45.25',
'redis_port': '6379',
'redis_db': 0,
'apache-proxy-config-file': '/tmp/zoe-proxy.conf',
'apache-log-file': '/var/log/apache2/access.log',
'proxy_update_accesses': 300
}
class CAaaSException(Exception):
class ZoeException(Exception):
def __init__(self):
self.value = 'Something happened'
@@ -6,16 +6,16 @@ class CAaaSException(Exception):
return repr(self.value)
class UserIDDoesNotExist(CAaaSException):
class UserIDDoesNotExist(ZoeException):
def __init__(self, user_id):
self.value = "The user ID {} does not exist".format(user_id)
class ApplicationStillRunning(CAaaSException):
class ApplicationStillRunning(ZoeException):
def __init__(self, application):
self.value = "The application {} cannot be removed because it is in use".format(application.id)
class CannotCreateCluster(CAaaSException):
class CannotCreateCluster(ZoeException):
def __init__(self, application):
self.value = "Cannot create a cluster for application {}".format(application.id)
import redis
from common.state import Application, Execution
from common.configuration import conf
def _connect():
server = conf["redis_server"]
port = conf["redis_port"]
db = conf["redis_db"]
return redis.StrictRedis(host=server, port=port, db=db)
def application_data_upload(application: Application, data: bytes) -> bool:
    r = _connect()
    key = "app-{}".format(application.id)
    return r.set(key, data)  # StrictRedis.set returns True on success
def application_data_download(application: Application) -> bytes:
r = _connect()
key = "app-{}".format(application.id)
return r.get(key)
def logs_archive_upload(execution: Execution, data: bytes) -> bool:
    r = _connect()
    key = "log-{}".format(execution.id)
    return r.set(key, data)  # StrictRedis.set returns True on success
def logs_archive_download(execution: Execution) -> bytes:
r = _connect()
key = "log-{}".format(execution.id)
return r.get(key)
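A round-trip sketch for these helpers, assuming a Redis server reachable with the configured conf values; _FakeApp is hypothetical and only supplies the id attribute the functions use:
```
# Hypothetical object standing in for a real Application row.
class _FakeApp:
    id = 42

application_data_upload(_FakeApp(), b"zip bytes")
assert application_data_download(_FakeApp()) == b"zip bytes"
```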
@@ -11,7 +11,7 @@ AlchemySession = sessionmaker(bind=_engine)
from common.state.container import Container
from common.state.cluster import Cluster
from common.state.application import Application, SparkApplication
from common.state.application import Application, SparkApplication, SparkNotebookApplication, SparkSubmitApplication
from common.state.user import User
from common.state.proxy import Proxy
from common.state.execution import Execution, SparkSubmitExecution
......
@@ -48,3 +48,29 @@ class SparkApplication(Application):
ret["master_image"] = self.master_image
ret["worker_image"] = self.worker_image
return ret
class SparkNotebookApplication(SparkApplication):
notebook_image = Column(String(256))
__mapper_args__ = {
'polymorphic_identity': 'spark-notebook'
}
def to_dict(self) -> dict:
ret = super().to_dict()
ret["notebook_image"] = self.notebook_image
return ret
class SparkSubmitApplication(SparkApplication):
submit_image = Column(String(256))
__mapper_args__ = {
'polymorphic_identity': 'spark-submit'
}
def to_dict(self) -> dict:
ret = super().to_dict()
ret["submit_image"] = self.submit_image
return ret
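A query sketch showing what the polymorphic identities provide, assuming SparkApplication configures polymorphic_on for its type column in the part of the file elided above:
```
from common.state import AlchemySession, Application

# Rows stored with identity 'spark-notebook' or 'spark-submit' come back
# as instances of the corresponding subclass.
session = AlchemySession()
for app in session.query(Application).all():
    print(type(app).__name__, app.to_dict())
```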
from pprint import pformat
from caaas_scheduler.swarm_status import SwarmStatus
from zoe_scheduler.swarm_status import SwarmStatus
from common.state import Application, Execution, SparkSubmitExecution
@@ -49,8 +49,8 @@ class ApplicationStatusReport(Report):
else:
exrep['finished_at'] = execution.time_finished.timestamp()
if type(execution) is SparkSubmitExecution:
exrep["commandline"] = execution.commmandline
if isinstance(execution, SparkSubmitExecution):
exrep["commandline"] = execution.commandline
exrep["spark_opts"] = execution.spark_opts
exrep["cluster"] = []
......
#!/usr/bin/env bash
P3=`which python3`
$P3 ./zoectl.py user-new venzano@eurecom.fr
$P3 ./zoectl.py spark-notebook-new --user-id 1 --name "small notebook" --worker-count 2 --executor-memory 2g --executor-cores 2
$P3 ./zoectl.py spark-app-new --user-id 1 --name "wordcount medium" --worker-count 8 --executor-memory 8g --executor-cores 8 --file ../wordcount.zip
@@ -2,13 +2,14 @@
SPARK_VER=1.4.1
HADOOP_VER=hadoop2.4
IMAGE_VER=1.2
python ./gen_dockerfiles.py ${SPARK_VER} ${HADOOP_VER}
for d in master worker shell submit notebook; do
cd $d
docker build -t 10.0.0.2:5000/venza/spark-$d:${SPARK_VER} .
docker push 10.0.0.2:5000/venza/spark-$d:${SPARK_VER}
docker build -t 10.0.0.2:5000/zoe/spark-$d-${SPARK_VER}:${IMAGE_VER} .
docker push 10.0.0.2:5000/zoe/spark-$d-${SPARK_VER}:${IMAGE_VER}
cd ..
docker -H 10.0.0.2:2380 pull 10.0.0.2:5000/venza/spark-$d:${SPARK_VER}
docker -H 10.0.0.2:2380 pull 10.0.0.2:5000/zoe/spark-$d-${SPARK_VER}:${IMAGE_VER}
done
#!/usr/bin/env bash
cd $1
/opt/spark/bin/spark-submit --master spark://${SPARK_MASTER_IP}:7077 --executor-memory=${SPARK_EXECUTOR_RAM} ${SPARK_OPTIONS} "${@:2}"
if [ -z "${APPLICATION_ID}" ]; then
echo "No application ID provided, cannot continue"
exit 1
fi
# Fetch the application bundle uploaded to Redis by the client and unpack it
mkdir -p "/tmp/${APPLICATION_ID}"
cd "/tmp/${APPLICATION_ID}" || exit 1
redis-cli ${REDIS_CLI_OPTIONS} get app-${APPLICATION_ID} > app.zip
unzip app.zip
/opt/spark/bin/spark-submit --master spark://${SPARK_MASTER_IP}:7077 --executor-memory=${SPARK_EXECUTOR_RAM} ${SPARK_OPTIONS} "$@"
@@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y --force-yes software-properties-common
RUN apt-add-repository -y ppa:webupd8team/java
RUN /bin/echo debconf shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections
RUN apt-get update && apt-get -y install oracle-java7-installer oracle-java7-set-default curl
RUN apt-get update && apt-get -y install oracle-java7-installer oracle-java7-set-default curl unzip redis-tools
RUN curl -s http://mirrors.ircam.fr/pub/apache/spark/spark-${SPARK_VERSION}/spark-${SPARK_VERSION}-bin-${HADOOP_VERSION}.tgz | tar -xz -C /opt/
......
#!/usr/bin/env bash
cd $1
/opt/spark/bin/spark-submit --master spark://${SPARK_MASTER_IP}:7077 --executor-memory=${SPARK_EXECUTOR_RAM} "${@:2}"
if [ -z "${APPLICATION_ID}" ]; then
echo "No application ID provided, cannot continue"
exit 1
fi
# Fetch the application bundle uploaded to Redis by the client and unpack it
mkdir -p "/tmp/${APPLICATION_ID}"
cd "/tmp/${APPLICATION_ID}" || exit 1
redis-cli ${REDIS_CLI_OPTIONS} get app-${APPLICATION_ID} > app.zip
unzip app.zip
/opt/spark/bin/spark-submit --master spark://${SPARK_MASTER_IP}:7077 --executor-memory=${SPARK_EXECUTOR_RAM} ${SPARK_OPTIONS} "$@"
#!/bin/sh
SPARK_VER=1.4.1
IMAGE_VER=1.2
for d in master worker shell submit notebook; do
docker -H 10.0.0.2:2380 pull 10.0.0.2:5000/venza/spark-${d}:${SPARK_VER}
docker -H 10.0.0.2:2380 pull 10.0.0.2:5000/zoe/spark-$d-${SPARK_VER}:${IMAGE_VER}
done
RUN apt-get update && apt-get install -y --force-yes curl unzip redis-tools
CMD /opt/submit.sh
#ADD start-master.sh /start-master.sh
......
@@ -2,9 +2,9 @@ import asyncio
import logging
import signal
from caaas_scheduler.rpyc_service import CAaaSSchedulerRPCService
from caaas_scheduler.rpyc_server import RPyCAsyncIOServer
from caaas_scheduler.scheduler import caaas_sched
from zoe_scheduler.rpyc_service import ZoeSchedulerRPCService
from zoe_scheduler.rpyc_server import RPyCAsyncIOServer
from zoe_scheduler.scheduler import zoe_sched
def sigint_handler():
@@ -15,14 +15,14 @@ if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('requests').setLevel(logging.WARNING)
logging.getLogger('asyncio').setLevel(logging.INFO)
log = logging.getLogger('caaas')
log = logging.getLogger('zoe')
loop = asyncio.get_event_loop()
loop.add_signal_handler(signal.SIGINT, sigint_handler)
rpyc_server = RPyCAsyncIOServer(CAaaSSchedulerRPCService, '0.0.0.0', port=4000, auto_register=True)
rpyc_server = RPyCAsyncIOServer(ZoeSchedulerRPCService, '0.0.0.0', port=4000, auto_register=True)
rpyc_server.start()
caaas_sched.init_tasks()
zoe_sched.init_tasks()
try:
loop.run_forever()
......
@@ -3,12 +3,12 @@ from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from caaas_web import app
from caaas_web.cleanup_thread import cleanup_task
from caaas_web.config_parser import config
from zoe_web import app
from zoe_web.cleanup_thread import cleanup_task
from zoe_web.config_parser import config
DEBUG = True
log = logging.getLogger("caaas_web")
log = logging.getLogger("zoe_web")
def main():
......
from zoe_client.client import ZoeClient
import rpyc
from sqlalchemy.orm.exc import NoResultFound
from common.state import AlchemySession, SparkApplication, User, Application, Cluster, SparkSubmitExecution, Execution
from common.state import AlchemySession, SparkApplication, User, Application, Cluster, SparkSubmitExecution, Execution, SparkNotebookApplication, SparkSubmitApplication
from common.application_resources import SparkApplicationResources
from common.status import PlatformStatusReport
from common.exceptions import UserIDDoesNotExist, ApplicationStillRunning
import common.object_storage as storage
REGISTRY = "10.0.0.2:5000"
MASTER_IMAGE = REGISTRY + "/venza/spark-master:1.4.1"
WORKER_IMAGE = REGISTRY + "/venza/spark-worker:1.4.1"
SHELL_IMAGE = REGISTRY + "/venza/spark-shell:1.4.1"
SUBMIT_IMAGE = REGISTRY + "/venza/spark-submit:1.4.1"
NOTEBOOK_IMAGE = REGISTRY + "/venza/spark-notebook:1.4.1"
MASTER_IMAGE = REGISTRY + "/zoe/spark-master-1.4.1:1.2"
WORKER_IMAGE = REGISTRY + "/zoe/spark-worker-1.4.1:1.2"
SHELL_IMAGE = REGISTRY + "/zoe/spark-shell-1.4.1:1.2"
SUBMIT_IMAGE = REGISTRY + "/zoe/spark-submit-1.4.1:1.2"
NOTEBOOK_IMAGE = REGISTRY + "/zoe/spark-notebook-1.4.1:1.2"
class CAaaSClient:
class ZoeClient:
def __init__(self):
self.server_connection = rpyc.connect_by_service("CAaaSSchedulerRPC")
self.server_connection = rpyc.connect_by_service("ZoeSchedulerRPC")
self.server = self.server_connection.root
self.state = AlchemySession()
@@ -33,47 +34,96 @@ class CAaaSClient:
def platform_status(self) -> PlatformStatusReport:
return self.server.get_platform_status()