Commit 92783187 authored by Daniele Venzano's avatar Daniele Venzano

Move out all source in other repositories, update README

parent d1fbc62d
......@@ -58,5 +58,4 @@ docs/_build/
# PyBuilder
language: python
- "3.4"
- pip install -r requirements.txt
- pip install pytest
- bash tests/resources/
- PYTHONPATH=. py.test --test-environment travis --cov=zoe_scheduler --cov=zoe_client --cov=zoe_web
- PYTHONPATH=. sphinx-build -nW -b html -d docs/_build/doctrees docs/ docs/_build/html
Zoe - Container-based Analytics as a Service
Zoe provides a simple way to provision data analytics clusters and
workflows using container-based (Docker) virtualization. The guiding
principles are:
- ease of use: data scientists know about data and applications,
systems and resource constraints should be kept out of the way
- ease of administration: we have a strong background in systems and
network administration, so we put all effort possible to make Zoe
easy to install and maintain
- use well-known technologies: we try hard not to reinvent the wheel,
we use Python, ZeroMQ, Docker and DNS
- a clear roadmap: our short and long-term objectives should always be
clear and well defined
- openness: the source code is open: clone, modify, discuss, test and
contribute, you are welcome!
Zoe provides a simple way to provision data analytics applications using Docker Swarm.
This is the main repository, it contains the documentation and a number of scripts, useful to install and develop Zoe.
We are in the process of updating documentation and scripts. For now you can refer to the version tagged 0.8.92 in this repository,
when all components were still together.
- Documentation:
- Docker images:
- Main website:
- Documentation:
- How to install:
Zoe is a distributed application and each component is developed in a separate Git repository.
- Zoe clients:
- Zoe scheduler:
- Zoe object storage:
Zoe can use any Docker image, but we provide some for the preconfigured applications available in the web interface:
- Docker images:
|Pypi version| |Python version| |Documentation Status| |Requirements Status|
|Documentation Status|
Zoe is licensed under the terms of the Apache 2.0 license.
.. |Pypi version| image::
.. |Python version| image::
.. |Documentation Status| image::
.. |Requirements Status| image::
import logging
from common.exceptions import InvalidApplicationDescription
log = logging.getLogger(__name__)
class ZoeApplication:
    """Description of a Zoe application: a set of processes (containers) plus
    scheduling metadata (priority, lifecycle flags, binary requirements).

    NOTE(review): this block was recovered from diff residue — the missing
    ``try:`` lines, the ``self.name`` initialization and the loop bodies in
    ``from_dict``/``to_dict`` were reconstructed; confirm against the
    original repository.
    """

    def __init__(self):
        self.name = ''  # reconstructed: original line was garbled ("def __init__(self): = ''")
        self.version = 0
        self.will_end = True  # when False the application runs indefinitely (e.g. a notebook)
        self.priority = 512  # scheduling priority, valid range is [0, 1024]
        self.requires_binary = False
        self.processes = []  # list of ZoeApplicationProcess

    @classmethod
    def from_dict(cls, data):
        """Build a ZoeApplication from a plain dictionary.

        :param data: dictionary with at least version, name, will_end,
            priority, requires_binary and processes keys
        :raises InvalidApplicationDescription: on any missing or invalid field
        """
        ret = cls()
        try:
            ret.version = int(data["version"])
        except ValueError:
            raise InvalidApplicationDescription(msg="version field should be an int")
        except KeyError:
            raise InvalidApplicationDescription(msg="Missing required key: version")

        required_keys = ['name', 'will_end', 'priority', 'requires_binary']
        for k in required_keys:
            try:
                setattr(ret, k, data[k])
            except KeyError:
                raise InvalidApplicationDescription(msg="Missing required key: %s" % k)

        try:
            ret.will_end = bool(ret.will_end)
        except ValueError:
            raise InvalidApplicationDescription(msg="will_end field must be a boolean")

        try:
            ret.requires_binary = bool(ret.requires_binary)
        except ValueError:
            raise InvalidApplicationDescription(msg="requires_binary field must be a boolean")

        try:
            ret.priority = int(ret.priority)
        except ValueError:
            raise InvalidApplicationDescription(msg="priority field must be an int")
        if ret.priority < 0 or ret.priority > 1024:
            raise InvalidApplicationDescription(msg="priority must be between 0 and 1024")

        if 'processes' not in data:
            raise InvalidApplicationDescription(msg="Missing required key: processes")
        for p in data['processes']:
            # reconstructed: the loop body was missing from the residue
            ret.processes.append(ZoeApplicationProcess.from_dict(p))

        # at least one process must be the monitor, it defines when the
        # whole execution is considered finished
        found_monitor = False
        for p in ret.processes:
            if p.monitor:
                found_monitor = True
        if not found_monitor:
            raise InvalidApplicationDescription(msg="at least one process should have monitor set to True")
        return ret

    def to_dict(self) -> dict:
        """Serialize this application to a plain dictionary (inverse of from_dict)."""
        ret = {
            'name': self.name,  # reconstructed key — presumably present originally; confirm
            'version': self.version,
            'will_end': self.will_end,
            'priority': self.priority,
            'requires_binary': self.requires_binary,
            'processes': []
        }
        for p in self.processes:
            ret['processes'].append(p.to_dict())  # reconstructed loop body
        return ret

    def total_memory(self) -> int:
        """Return the sum of the memory required by all processes, in bytes."""
        memory = 0
        for p in self.processes:
            memory += p.required_resources['memory']
        return memory

    def container_count(self) -> int:
        """Return the number of containers this application will start."""
        return len(self.processes)
class ZoeProcessEndpoint:
    """A network endpoint (port) exposed by an application process.

    NOTE(review): reconstructed from diff residue — the ``self.name``
    initialization, the missing ``try:`` lines and the ``@classmethod``
    decorator were restored; confirm against the original repository.
    """

    def __init__(self):
        self.name = ''  # reconstructed: original line was garbled
        self.protocol = ''  # e.g. "http"
        self.port_number = 0
        self.path = ''  # optional URL path appended when building the endpoint URL
        self.is_main_endpoint = False  # the endpoint shown to the user as the entry point

    def to_dict(self) -> dict:
        """Serialize this endpoint to a plain dictionary."""
        return {
            'name': self.name,  # reconstructed key — presumably present originally; confirm
            'protocol': self.protocol,
            'port_number': self.port_number,
            'path': self.path,
            'is_main_endpoint': self.is_main_endpoint
        }

    @classmethod
    def from_dict(cls, data):
        """Build a ZoeProcessEndpoint from a plain dictionary.

        :raises InvalidApplicationDescription: on missing or invalid fields
        """
        ret = cls()
        required_keys = ['name', 'protocol', 'port_number', 'is_main_endpoint']
        for k in required_keys:
            try:
                setattr(ret, k, data[k])
            except KeyError:
                raise InvalidApplicationDescription(msg="Missing required key: %s" % k)

        try:
            ret.port_number = int(ret.port_number)
        except ValueError:
            raise InvalidApplicationDescription(msg="port_number field should be an integer")

        try:
            ret.is_main_endpoint = bool(ret.is_main_endpoint)
        except ValueError:
            raise InvalidApplicationDescription(msg="is_main_endpoint field should be a boolean")

        if 'path' in data:  # path is optional
            ret.path = data['path']
        return ret

    def get_url(self, address):
        """Return the full URL for this endpoint at the given host address."""
        return self.protocol + "://" + address + ":{}".format(self.port_number) + self.path
class ZoeApplicationProcess:
def __init__(self): = ''
self.version = 0
self.docker_image = ''
self.monitor = False # if this process dies, the whole application is considered as complete and the execution is terminated
self.ports = [] # A list of ZoeProcessEndpoint
self.required_resources = {}
self.environment = [] # Environment variables to pass to Docker
self.command = None # Commandline to pass to the Docker container
def to_dict(self) -> dict:
ret = {
'version': self.version,
'docker_image': self.docker_image,
'monitor': self.monitor,
'ports': [p.to_dict() for p in self.ports],
'required_resources': self.required_resources.copy(),
'environment': self.environment.copy(),
'command': self.command
return ret
def from_dict(cls, data):
ret = cls()
ret.version = int(data["version"])
except ValueError:
raise InvalidApplicationDescription(msg="version field should be an int")
except KeyError:
raise InvalidApplicationDescription(msg="Missing required key: version")
required_keys = ['name', 'docker_image', 'monitor']
for k in required_keys:
setattr(ret, k, data[k])
except KeyError:
raise InvalidApplicationDescription(msg="Missing required key: %s" % k)
ret.monitor = bool(ret.monitor)
except ValueError:
raise InvalidApplicationDescription(msg="monitor field should be a boolean")
if 'ports' not in data:
raise InvalidApplicationDescription(msg="Missing required key: ports")
if not hasattr(data['ports'], '__iter__'):
raise InvalidApplicationDescription(msg='ports should be an iterable')
for pp in data['ports']:
if 'required_resources' not in data:
raise InvalidApplicationDescription(msg="Missing required key: required_resources")
if not isinstance(data['required_resources'], dict):
raise InvalidApplicationDescription(msg="required_resources should be a dictionary")
if 'memory' not in data['required_resources']:
raise InvalidApplicationDescription(msg="Missing required key: required_resources -> memory")
ret.required_resources = data['required_resources'].copy()
ret.required_resources['memory'] = int(ret.required_resources['memory'])
except ValueError:
raise InvalidApplicationDescription(msg="required_resources -> memory field should be an int")
if 'environment' in data:
if not hasattr(data['environment'], '__iter__'):
raise InvalidApplicationDescription(msg='environment should be an iterable')
ret.environment = data['environment'].copy()
for e in ret.environment:
if len(e) != 2:
raise InvalidApplicationDescription(msg='environment variable should have a name and a value')
if not isinstance(e[0], str):
raise InvalidApplicationDescription(msg='environment variable names must be strings: {}'.format(e[0]))
if not isinstance(e[1], str):
raise InvalidApplicationDescription(msg='environment variable values must be strings: {}'.format(e[1]))
if 'command' in data:
ret.command = data['command']
return ret
def exposed_endpoint(self) -> ZoeProcessEndpoint:
for p in self.ports:
assert isinstance(p, ZoeProcessEndpoint)
if p.is_main_endpoint:
return p
return None
This diff is collapsed.
from configparser import ConfigParser
config_paths = [
defaults = {
'common': {
'object_storage_url': 'http://localhost:4390'
'zoe_client': {
'db_connect': 'mysql+mysqlconnector://zoe:pass@dbhost/zoe',
'scheduler_ipc_address': 'localhost',
'scheduler_ipc_port': 8723,
'zoe_web': {
'smtp_server': '',
'smtp_user': '',
'smtp_password': 'changeme',
'cookie_secret': b"\xc3\xb0\xa7\xff\x8fH'\xf7m\x1c\xa2\x92F\x1d\xdcz\x05\xe6CJN5\x83!",
'web_server_name': 'localhost'
'zoe_scheduler': {
'swarm_manager_url': 'tcp://',
'docker_private_registry': '',
'status_refresh_interval': 10,
'check_terminated_interval': 30,
'db_connect': 'mysql+mysqlconnector://zoe:pass@dbhost/zoe',
'ipc_listen_address': '',
'ipc_listen_port': 8723,
'ddns_keyfile': '/path/to/rndc.key',
'ddns_server': '',
'ddns_domain': ''
_zoeconf = None
# NOTE(review): this block is diff residue — indentation, any @property
# decorators and the __init__ body appear to have been lost in extraction,
# so the statements below are not syntactically valid as-is. Only comments
# are added here; every statement is left byte-identical.
# ZoeConfig wraps ConfigParser and exposes typed accessors for values in
# the [common], [zoe_client], [zoe_web] and [zoe_scheduler] sections.
class ZoeConfig(ConfigParser):
def __init__(self):
# NOTE(review): constructor body missing — presumably calls the parent
# constructor and installs the module-level defaults dict; confirm.
# Write the default configuration to file object fp (reads like a
# @classmethod whose decorator and body tail were lost; confirm).
def write_defaults(cls, fp):
tmp = cls()
# --- zoe_client accessors ---
# NOTE(review): db_url is defined twice in this residue (here for
# zoe_client and below for zoe_scheduler); in one class the later
# definition would override this one — likely these lived in separate
# classes or files originally; confirm.
def db_url(self) -> str:
return self.get('zoe_client', 'db_connect')
def ipc_server(self) -> str:
return self.get('zoe_client', 'scheduler_ipc_address')
def ipc_port(self) -> int:
return self.getint('zoe_client', 'scheduler_ipc_port')
# --- common accessors ---
def object_storage_url(self) -> str:
return self.get('common', 'object_storage_url')
# --- zoe_web accessors ---
def web_server_name(self) -> str:
return self.get('zoe_web', 'web_server_name')
def smtp_server(self) -> str:
return self.get('zoe_web', 'smtp_server')
def smtp_user(self) -> str:
return self.get('zoe_web', 'smtp_user')
def smtp_password(self) -> str:
return self.get('zoe_web', 'smtp_password')
def cookies_secret_key(self):
return self.get('zoe_web', 'cookie_secret')
# --- zoe_scheduler accessors ---
def check_terminated_interval(self) -> int:
return self.getint('zoe_scheduler', 'check_terminated_interval')
def db_url(self) -> str:
return self.get('zoe_scheduler', 'db_connect')
def status_refresh_interval(self) -> int:
return self.getint('zoe_scheduler', 'status_refresh_interval')
def docker_swarm_manager(self) -> str:
return self.get('zoe_scheduler', 'swarm_manager_url')
def docker_private_registry(self) -> str:
return self.get('zoe_scheduler', 'docker_private_registry')
def ipc_listen_port(self) -> int:
return self.getint('zoe_scheduler', 'ipc_listen_port')
def ipc_listen_address(self) -> str:
return self.get('zoe_scheduler', 'ipc_listen_address')
def ddns_keyfile(self) -> str:
return self.get('zoe_scheduler', 'ddns_keyfile')
def ddns_server(self) -> str:
return self.get('zoe_scheduler', 'ddns_server')
def ddns_domain(self) -> str:
return self.get('zoe_scheduler', 'ddns_domain')
def conf_init(config_file=None) -> ZoeConfig:
    """Create and install the module-level ZoeConfig singleton.

    :param config_file: optional path to a specific configuration file;
        when None the standard search paths are used
    :returns: the freshly created ZoeConfig
    """
    global _zoeconf
    _zoeconf = ZoeConfig()
    if config_file is None:
        # NOTE(review): the branch bodies were missing from the residue —
        # reconstructed as reading the default search paths vs. the explicit
        # file; confirm against the original repository.
        _zoeconf.read(config_paths)
    else:
        _zoeconf.read(config_file)
    return _zoeconf
def zoe_conf() -> ZoeConfig:
    """Return the process-wide configuration singleton.

    The singleton is whatever conf_init() last installed; callers are
    expected to have initialized the configuration beforehand.
    """
    conf = _zoeconf
    return conf
class ZoeException(Exception):
    """Base class for all Zoe errors; carries a human-readable message in .value."""

    def __init__(self):
        self.value = 'Something happened'

    def __str__(self):
        return repr(self.value)


class CannotCreateCluster(ZoeException):
    """Raised when a container cluster cannot be created for an application."""

    def __init__(self, application):
        # NOTE(review): the original line was truncated at ".format(" — the
        # argument was reconstructed as the application itself; the original
        # may have formatted a specific attribute (e.g. its name). Confirm.
        self.value = "Cannot create a cluster for application {}".format(application)


class InvalidApplicationDescription(ZoeException):
    """Raised when an application description dictionary fails validation."""

    def __init__(self, msg):
        self.value = msg


class DDNSUpdateFailed(ZoeException):
    """Raised when a dynamic DNS update could not be performed."""

    def __init__(self, msg):
        self.value = msg
__version__ = '0.8.91'
from io import BytesIO
import logging
import zipfile
import requests
import requests.exceptions
from common.configuration import zoe_conf
log = logging.getLogger(__name__)
def generate_storage_url(obj_id: int, kind: str) -> str:
    """Return the object-storage URL for object *obj_id* of the given *kind*."""
    base = zoe_conf().object_storage_url
    return base + '/{}/{}'.format(kind, obj_id)
def put(obj_id, kind, data: bytes):
    """Upload binary data to the object storage under /<kind>/<obj_id>.

    Best-effort: a connection failure is logged, not raised.
    """
    url = zoe_conf().object_storage_url + '/{}/{}'.format(kind, obj_id)
    files = {'file': data}
    try:
        # reconstructed: the POST call line was truncated in the residue — confirm
        requests.post(url, files=files)
    except requests.exceptions.ConnectionError:
        log.error("Cannot connect to {} to POST the binary file".format(url))
def get(obj_id, kind) -> bytes:
    """Fetch the binary object /<kind>/<obj_id> from the object storage.

    :returns: the object contents, or None if the storage is unreachable
    """
    url = zoe_conf().object_storage_url + '/{}/{}'.format(kind, obj_id)
    try:
        # reconstructed: the missing "try:" line was restored — confirm
        r = requests.get(url)
    except requests.exceptions.ConnectionError:
        log.error("Cannot connect to {} to GET the binary file".format(url))
        return None
    return r.content
def check(obj_id, kind) -> bool:
    """Return True if object /<kind>/<obj_id> exists in the object storage.

    Uses a HEAD request; an unreachable storage counts as "not present".
    """
    url = zoe_conf().object_storage_url + '/{}/{}'.format(kind, obj_id)
    try:
        # reconstructed: the missing "try:" line was restored — confirm
        r = requests.head(url)
    except requests.exceptions.ConnectionError:
        return False
    return r.status_code == 200
def delete(obj_id, kind):
    """Delete object /<kind>/<obj_id> from the object storage.

    Best-effort: a connection failure is logged, not raised.
    """
    url = zoe_conf().object_storage_url + '/{}/{}'.format(kind, obj_id)
    try:
        # reconstructed: the DELETE call was missing from the residue — confirm
        requests.delete(url)
    except requests.exceptions.ConnectionError:
        log.error("Cannot connect to {} to DELETE the binary file".format(url))
def logs_archive_create(execution_id: int, logs: list):
    """Zip the given container logs and upload them to the object storage.

    Each entry of *logs* is an indexable triple: the first two elements name
    the log file ("<first>-<second>.txt") and the third is its contents.
    """
    buffer = BytesIO()
    with zipfile.ZipFile(buffer, "w", compression=zipfile.ZIP_DEFLATED) as archive:
        for entry in logs:
            entry_name = entry[0] + "-" + entry[1] + ".txt"
            archive.writestr(entry_name, entry[2])
    put(execution_id, "logs", buffer.getvalue())
......@@ -15,13 +15,11 @@
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(os.path.join(os.path.basename(__file__), "..", "..")))
from common.version import __version__
# -- General configuration ------------------------------------------------
......@@ -60,9 +58,9 @@ author = 'Daniele Venzano'
# built documents.
# The short X.Y version.
version = __version__
version = '0.8.92'
# The full version, including alpha/beta/rc tags.
release = __version__
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#!/usr/bin/env bash
# Build and publish the zoe-analytics package to PyPI.
# Abort on the first failing command.
set -e
# Remove artifacts from previous builds so stale files are not packaged.
rm -Rf dist/ build/ zoe-analytics.egg-info
# Build the source and wheel distributions.
# NOTE(review): the residue showed "python3 sdist" — the "setup.py" token
# was apparently eaten by extraction and has been restored; confirm.
python3 setup.py sdist
python3 setup.py bdist_wheel
# Upload everything in dist/ to the "pypi" repository configured in ~/.pypirc.
twine upload -r pypi dist/*
from setuptools import setup, find_packages

# Single-source the package version: execute the version module in a scratch
# namespace and pull __version__ out of it, avoiding an import of the package
# itself at build time.
version = {}
# NOTE(review): the residue showed open("common/") and exec(, version) — the
# filename tail and the exec argument were reconstructed; confirm.
with open("common/version.py") as fp:
    exec(fp.read(), version)
version = version['__version__']

# Long description for PyPI, taken from the README.
long_description = open('README.rst').read()
# Versions should comply with PEP440. For a discussion on single-sourcing
# the version across and the project code, see
description='Zoe - Analytics on demand',
# The project's main homepage.
# Author details
author='Daniele Venzano',
# Choose your license
license='Apache 2.0',
# See
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: IPython',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Education',
'Operating System :: POSIX :: Linux',
'Topic :: Software Development',
'Topic :: System :: Distributed Computing',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: Apache Software License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
# What does your project relate to?
keywords='spark analytics docker swarm containers notebook',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
packages=find_packages(exclude=['scripts', 'tests']),
# List run-time dependencies here. These will be installed by pip when
# your project is installed. For an analysis of "install_requires" vs pip's
# requirements files see:
# List additional groups of dependencies here (e.g. development
# dependencies). You can install these using the following syntax,
# for example:
# $ pip install -e .[dev,test]
'dev': ['Sphinx', 'wheel', 'twine'],
'test': ['pytest-cov', 'pytest'],
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in as well.
'': ['*.sh', '*.conf', '*.rst', '*.css', '*.js', '*.html'],
# Although 'package_data' is the preferred approach, in some case you may
# need to place data files outside of your packages. See:
# # noqa
# In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
# data_files=[('my_data', ['data/data_file'])],
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow