......
@@ -62,3 +62,4 @@ state.zoe
/zoe*.conf
zoepass.csv
/docker.conf
/kube.conf
......
@@ -20,7 +20,7 @@ cache:
pylint:
stage: static-test
image: python:3.4
image: python:3.5
script:
- pip install -U pip setuptools
- pip install -r requirements.txt
......
@@ -30,8 +30,8 @@ pylint:
unittests:
stage: static-test
services:
- postgres:9.3
image: python:3.4
- postgres:9.5
image: python:3.5
script:
- pip install -U pip setuptools
- pip install -r requirements.txt
......
@@ -40,7 +40,7 @@ unittests:
docs-test:
stage: static-test
image: python:3.4
image: python:3.5
script:
- pip install -U pip setuptools
- pip install -r requirements.txt
......
@@ -57,66 +57,12 @@ images:
script:
- docker build --pull -t zoerepo/${ZOE_TEST_IMAGE} -f Dockerfile.test .
- docker push zoerepo/${ZOE_TEST_IMAGE}
- docker rm -f nginx0-1-integration_test || true
api-test:
stage: integration-test
services:
- postgres:9.3
- postgres:9.5
image: zoerepo/${ZOE_TEST_IMAGE}
script:
- echo 'admin,admin,admin' > zoepass.csv
- pwd
- ls
- bash ./run_integration_tests.sh
mirror-github:
stage: deploy
image: python:3.4
before_script:
- which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )
- eval $(ssh-agent -s)
- ssh-add <(echo "$SSH_PRIVATE_KEY")
- mkdir -p ~/.ssh
- 'echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ~/.ssh/config'
script:
- ssh -A ubuntu@${STAGING_IP} /home/ubuntu/mirror-github.sh
docs:
stage: deploy
image: zoerepo/${ZOE_TEST_IMAGE}
before_script:
- which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )
- eval $(ssh-agent -s)
- ssh-add <(echo "$SSH_PRIVATE_KEY")
- mkdir -p ~/.ssh
- 'echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ~/.ssh/config'
- apt-get update -y && apt-get install rsync -y
script:
- sh ./build_docs.sh
- rsync -avz docs/_build/html/ ubuntu@${STAGING_IP}:${WEB_STAGING_PATH}/docs
environment:
name: staging
only:
- master
zoe:
stage: deploy
image: zoerepo/${ZOE_TEST_IMAGE}
before_script:
- which ssh-agent || ( apt-get update -y && apt-get install openssh-client -y )
- eval $(ssh-agent -s)
- ssh-add <(echo "$SSH_PRIVATE_KEY")
- mkdir -p ~/.ssh
- 'echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ~/.ssh/config'
- apt-get update -y && apt-get install rsync -y
script:
- ssh ubuntu@${STAGING_IP} sudo chown -R ubuntu:ubuntu /srv/zoe/*
- rsync -avz --delete . ubuntu@${STAGING_IP}:${ZOE_STAGING_PATH}
- ssh ubuntu@${STAGING_IP} sudo pip install --upgrade -r /srv/zoe/requirements.txt
- ssh ubuntu@${STAGING_IP} /home/ubuntu/clean_zoe_db.sh
- ssh ubuntu@${STAGING_IP} sudo supervisorctl restart zoe-api
- ssh ubuntu@${STAGING_IP} sudo supervisorctl restart zoe-master
environment:
name: staging
only:
- master
......
@@ -50,7 +50,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=line-too-long,invalid-sequence-index,parameter-unpacking,unpacking-in-except,backtick,long-suffix,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,deprecated-pragma,no-self-use,too-many-instance-attributes,too-few-public-methods,too-many-public-methods,too-many-branches,too-many-statements,fixme,global-statement,logging-format-interpolation,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,len-as-condition,no-else-return,arguments-differ
disable=line-too-long,invalid-sequence-index,parameter-unpacking,unpacking-in-except,backtick,long-suffix,raw-checker-failed,bad-inline-option,locally-disabled,locally-enabled,file-ignored,suppressed-message,deprecated-pragma,no-self-use,too-many-instance-attributes,too-few-public-methods,too-many-public-methods,too-many-branches,too-many-statements,fixme,global-statement,logging-format-interpolation,apply-builtin,basestring-builtin,buffer-builtin,cmp-builtin,coerce-builtin,execfile-builtin,file-builtin,long-builtin,raw_input-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,no-absolute-import,old-division,dict-iter-method,dict-view-method,next-method-called,metaclass-assignment,indexing-exception,reload-builtin,oct-method,hex-method,nonzero-method,cmp-method,input-builtin,round-builtin,intern-builtin,unichr-builtin,map-builtin-not-iterating,zip-builtin-not-iterating,range-builtin-not-iterating,filter-builtin-not-iterating,using-cmp-argument,eq-without-hash,div-method,idiv-method,rdiv-method,exception-message-attribute,sys-max-int,bad-python3-import,deprecated-string-function,deprecated-str-translate-call,len-as-condition,no-else-return,arguments-differ,no-else-raise,no-value-for-parameter
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
......
# Zoe Changelog
## Version 2018.12
* Implement a disabled label that lets admins disable hosts for maintenance reasons
* Support running behind a reverse proxy
* Option to get usage metrics from influxdb/telegraf
* Show termination reasons on the web interface
* GitLab authentication via OAuth2
* Add a PAM login option
* Implement an alternative to websockets for log streaming to the web interface
* Implement users, roles and quotas
* Implement pagination for the execution list
* Implement RANDOM and WATERFILL placement policies
* Dynamic job size calculation prevents starvation of jobs waiting in the queue
* Perform synchronous termination at the end of the scheduler loop to prevent race conditions
* Removed deprecated features: Swarm back-end and the simple scheduler
## Version 2017.12
* New Docker Engine back-end; the Swarm back-end is now deprecated
......
......
@@ -2,10 +2,8 @@
URL: [https://hub.docker.com/r/jupyter/r-notebook/](https://hub.docker.com/r/jupyter/r-notebook/)
* Jupyter Notebook 5.0.x
* Conda R v3.3.x and channel
* Jupyter Notebook and JupyterLab
* Conda R
* plyr, devtools, shiny, rmarkdown, forecast, rsqlite, reshape2, nycflights13, caret, rcurl, and randomforest pre-installed
* The tidyverse R packages are also installed, including ggplot2, dplyr, tidyr, readr, purrr, tibble, stringr, lubridate, and broom
Please note that you need to retrieve the secret key from the service logs to be able to access the notebooks.
zapps/rdatasci:10396
......
@@ -5,8 +5,9 @@
"category": "Data science",
"readable_descr": "README-r.md",
"name": "R notebook",
"description": "r-notebook.json",
"parameters": []
"description": "rdatasci.json",
"parameters": [],
"disabled_for": ["student"]
}
]
}
......
{
"name": "microsoft-mls",
"name": "rdatasci",
"services": [
{
"command": null,
"environment": [
[
"ACCEPT_EULA",
"yes"
]
],
"command": "/usr/local/bin/start_notebook.sh",
"environment": [],
"essential_count": 1,
"image": "microsoft/mmlspark:0.10",
"image": "zapps/rdatasci:10396",
"monitor": true,
"name": "mls-notebook",
"name": "r-notebook",
"ports": [
{
"name": "Notebook web interface",
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
"proxy": true,
"url_template": "http://{ip_port}{proxy_path}"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
"max": 2,
"min": 2
},
"memory": {
"max": 6442450944,
......
# Notebook for Data Science
This ZApp contains a Jupyter Notebook with a Python 3.5 kernel and the following libraries:
* Tensorflow 1.10.1, Tensorboard 1.10.0
* Pytorch and TorchVision 0.4.1
* pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodels, cloudpickle, dill, numba, bokeh
The GPU version also contains CUDA 9.0 and TensorFlow with GPU support.
## Customizations
### Adding Python libraries
To install additional libraries you can add the following code at the top of your notebook:
import subprocess
import sys

def install(package):
    # the --user option must follow pip's "install" subcommand
    subprocess.call([sys.executable, "-m", "pip", "install", "--user", package])

install("seaborn")  # for example: install the seaborn package
and call the `install(<package name>)` function to install all packages you need.
Finally restart the kernel to load the modules you just installed.
### Running your own script
By modifying the `command` parameter in the JSON file you can tell Zoe to run your own script instead of the notebook.
In this ZApp the default command is:
"command": "jupyter lab --no-browser --NotebookApp.token='' --allow-root --ip=0.0.0.0"
If you change the JSON and write:
"command": "/mnt/workspace/myscript.sh"
Zoe will run myscript.sh instead of running the Jupyter notebook. In this way you can:
* transform an interactive notebook ZApp into a batch one, with exactly the same libraries and environment
* perform additional setup before starting the notebook. In this case you have to add the jupyter lab command defined above at the end of your script, as in the sketch below.
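For instance, a hypothetical `myscript.sh` for the second case might look like this sketch (the `rpy2` install is only an illustration of extra setup; the last line is the default command shown above):

#!/bin/bash
# Hypothetical myscript.sh: perform extra setup, then start the notebook as usual.
pip install --user rpy2  # illustrative setup step
cd /mnt/workspace
jupyter lab --no-browser --NotebookApp.token='' --allow-root --ip=0.0.0.0

Saving such a script in your workspace and pointing `command` at it keeps the ZApp's libraries and environment unchanged.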
# Notebook for MALIS Eurecom course
MALIS: MAchine Learning and Intelligent Systems
This ZApp contains a Jupyter Notebook with a Python 3.5 kernel and the following libraries:
* Tensorflow 1.10.1, Tensorboard 1.10.0
* Pytorch and TorchVision 0.4.1
* pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodels, cloudpickle, dill, numba, bokeh
zapps/pydatasci:10396
zapps/pydatasci-gpu:10396
{
"version": 1,
"zapps": [
{
"category": "Data science",
"readable_descr": "README-datascience.md",
"name": "Data science notebook",
"description": "pydatasci.json",
"parameters": [],
"disabled_for": ["student"]
},
{
"category": "Labs",
"readable_descr": "README-malis.md",
"name": "MALIS",
"description": "pydatasci-malis.json",
"parameters": []
},
{
"category": "Data science",
"readable_descr": "README-datascience.md",
"name": "Data science notebook GPU",
"description": "pydatasci-gpu.json",
"parameters": [
{
"kind": "environment",
"name": "NVIDIA_VISIBLE_DEVICES",
"readable_name": "GPU",
"description": "Which GPU to enable for this execution (e.g. all: all GPUs, 0: just GPU #0, 0,2: GPU #0 and #2)",
"type": "string",
"default": "all"
}
],
"disabled_for": ["student"]
}
]
}
{
"name": "tf-google-gpu",
"name": "pydatasci-gpu",
"services": [
{
"command": null,
"command": "/usr/local/bin/start_notebook.sh",
"environment": [
[
"NVIDIA_VISIBLE_DEVICES",
......
@@ -10,22 +10,23 @@
]
],
"essential_count": 1,
"image": "gcr.io/tensorflow/tensorflow:1.3.0-gpu-py3",
"image": "zapps/pydatasci-gpu:10396",
"labels": [
"gpu"
],
"monitor": true,
"name": "tf-jupyter",
"name": "py-notebook",
"ports": [
{
"name": "Tensorboard web interface",
"port_number": 6006,
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
"proxy": true,
"url_template": "http://{ip_port}{proxy_path}"
},
{
"name": "Notebook web interface",
"port_number": 8888,
"name": "Tensorboard",
"port_number": 6006,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
......
@@ -33,12 +34,12 @@
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
"max": 2,
"min": 2
},
"memory": {
"max": 34359738368,
"min": 34359738368
"max": 6442450944,
"min": 6442450944
}
},
"startup_order": 0,
......
{
"name": "stnet-google",
"name": "pydatasci",
"services": [
{
"command": null,
"command": "/usr/local/bin/start_notebook.sh",
"environment": [],
"essential_count": 1,
"image": "tensorflow/syntaxnet",
"image": "zapps/pydatasci:10396",
"labels": [
"labs"
],
"monitor": true,
"name": "tf-jupyter",
"name": "py-notebook",
"ports": [
{
"name": "Tensorboard web interface",
"port_number": 6006,
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
"proxy": true,
"url_template": "http://{ip_port}{proxy_path}"
},
{
"name": "Notebook web interface",
"port_number": 8888,
"name": "Tensorboard",
"port_number": 6006,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
......
@@ -25,12 +29,12 @@
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
"max": 2,
"min": 2
},
"memory": {
"max": 34359738368,
"min": 34359738368
"max": 6442450944,
"min": 6442450944
}
},
"startup_order": 0,
......
{
"name": "mag-google",
"name": "pydatasci",
"services": [
{
"command": null,
"command": "/usr/local/bin/start_notebook.sh",
"environment": [],
"essential_count": 1,
"image": "tensorflow/magenta",
"image": "zapps/pydatasci:10396",
"monitor": true,
"name": "tf-jupyter",
"name": "py-notebook",
"ports": [
{
"name": "Tensorboard web interface",
"port_number": 6006,
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
"proxy": true,
"url_template": "http://{ip_port}{proxy_path}"
},
{
"name": "Notebook web interface",
"port_number": 8888,
"name": "Tensorboard",
"port_number": 6006,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
......
@@ -25,12 +26,12 @@
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
"max": 2,
"min": 2
},
"memory": {
"max": 34359738368,
"min": 34359738368
"max": 6442450944,
"min": 6442450944
}
},
"startup_order": 0,
......
# Rapids.ai
URL: [http://rapids.ai/index.html](http://rapids.ai/index.html)
You can find example notebooks in /rapids.
zapps/rapidsai:10396
......
@@ -2,11 +2,13 @@
"version": 1,
"zapps": [
{
"category": "Machine learning",
"name": "Microsoft Machine Learning for Spark",
"description": "microsoft-mls.json",
"category": "Third party",
"readable_descr": "README.md",
"parameters": []
"name": "Rapids.AI",
"description": "rapidsai.json",
"parameters": [],
"disabled_for": ["student"]
}
]
}
{
"name": "pytorch-notebook",
"name": "rapidsai",
"services": [
{
"command": null,
"command": "/usr/local/bin/start_rapids.sh",
"environment": [
[
"NB_UID",
"1000"
],
[
"HOME",
"/mnt/workspace"
"NVIDIA_VISIBLE_DEVICES",
"all"
]
],
"essential_count": 1,
"image": "zapps/pytorch:4761",
"image": "zapps/rapidsai:10396",
"labels": [
"gpu"
],
"monitor": true,
"name": "jupyter",
"name": "rapidsai-nb",
"ports": [
{
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
"proxy": true,
"url_template": "http://{ip_port}{proxy_path}"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
"max": 2,
"min": 2
},
"memory": {
"max": 4294967296,
"min": 4294967296
"max": 6442450944,
"min": 6442450944
}
},
"startup_order": 0,
"total_count": 1,
"volumes": [],
"work_dir": "/mnt/workspace"
"volumes": []
}
],
"size": 512,
......
......
@@ -14,7 +14,7 @@
]
],
"essential_count": 1,
"image": "zapps/boinc:4759",
"image": "zapps/boinc:10396",
"monitor": true,
"name": "boinc-client",
"ports": [],
......
......
@@ -2,7 +2,7 @@
"version": 1,
"zapps": [
{
"category": "Non-interactive",
"category": "Third party",
"name": "Boinc client",
"description": "boinc.json",
"readable_descr": "README.md",
......
@@ -23,7 +23,8 @@
"type": "string",
"default": null
}
]
],
"disabled_for": ["student"]
}
]
}
# Jupyter Notebook image
This image contains the Jupyter notebook configured with Python and a Spark client. It is used by Zoe, the Container Analytics as a Service system, to create on-demand notebooks connected to containerized Spark clusters.
Zoe can be found at: https://github.com/DistributedSystemsGroup/zoe
## Setup
The Dockerfile runs a start script that configures the Notebook using these environment variables (a sketch of a manual run follows the list):
* SPARK\_MASTER\_IP: IP address of the Spark master this notebook should use for its kernel
* PROXY\_ID: string to use as a prefix for URL paths, for reverse proxying
* SPARK\_EXECUTOR\_RAM: How much RAM to use for each executor spawned by the notebook
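As a sketch, a manual run outside of Zoe could pass these variables as follows. The image name, address, proxy prefix and memory size are assumptions for illustration only; Zoe sets all of this itself when it starts the service:

# Hypothetical manual invocation of the notebook image.
# 4294967296 bytes = 4 GiB, the byte format used by the ZApp descriptions in this document.
docker run -d --name spark-notebook \
  -e SPARK_MASTER_IP=192.168.1.10 \
  -e PROXY_ID=nb-user1 \
  -e SPARK_EXECUTOR_RAM=4294967296 \
  -p 8888:8888 \
  zapps/spark2-jupyter-notebook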
# Spark Scala master image
This image contains the Spark master process. It is used by Zoe, the Container Analytics as a Service system, to create on-demand Spark clusters in Spark standalone mode.
Zoe can be found at: https://github.com/DistributedSystemsGroup/zoe
## Setup
The Dockerfile automatically starts the Spark master process when the container is run.
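A minimal sketch of running it by hand (the image name is an assumption; ports 8080 and 7077 are the master web interface and the spark:// endpoint referenced elsewhere in this document):

# Hypothetical manual run; Zoe normally launches this container itself.
docker run -d --name spark-master -p 8080:8080 -p 7077:7077 zapps/spark2-master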
# Spark worker image
This image contains the Spark worker process. It is used by Zoe, the Container Analytics as a Service system, to create on-demand Spark clusters in standalone mode.
Zoe can be found at: https://github.com/DistributedSystemsGroup/zoe
## Setup
The Dockerfile starts the worker process when the container is run. The following options can be passed via environment variables (a sketch of a manual run follows the list):
* SPARK\_MASTER\_IP: IP address of the Spark master this worker should connect to
* SPARK\_WORKER\_RAM: How much RAM the worker can use (default is 4g)
* SPARK\_WORKER\_CORES: How many cores can be used by the worker process (default is 4)
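A minimal sketch of a manual run with these options (image name, address and sizes are illustrative assumptions; Zoe sets these variables when it starts the service):

# Hypothetical manual run of the worker image.
docker run -d --name spark-worker \
  -e SPARK_MASTER_IP=192.168.1.10 \
  -e SPARK_WORKER_RAM=8g \
  -e SPARK_WORKER_CORES=2 \
  zapps/spark2-worker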
{
"name": "clouds-lab-spark",
"services": [
{
"command": null,
"environment": [
[
"SPARK_MASTER_IP",
"{dns_name#self}"
],
[
"HADOOP_USER_NAME",
"{user_name}"
],
[
"PYTHONHASHSEED",
"42"
]
],
"essential_count": 1,
"image": "zapps/spark2-master-clouds:4769",
"monitor": false,
"name": "spark-master",
"ports": [
{
"name": "Spark master web interface",
"port_number": 8080,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 1,
"min": 0.1
},
"memory": {
"max": 2684354560,
"min": 2147483648
}
},
"startup_order": 0,
"total_count": 1,
"volumes": []
},
{
"command": null,
"environment": [
[
"SPARK_WORKER_CORES",
"1"
],
[
"SPARK_WORKER_RAM",
"9126805504"
],
[
"SPARK_MASTER_IP",
"{dns_name#spark-master0}"
],
[
"SPARK_LOCAL_IP",
"{dns_name#self}"
],
[
"PYTHONHASHSEED",
"42"
],
[
"HADOOP_USER_NAME",
"{user_name}"
]
],
"essential_count": 1,
"image": "zapps/spark2-worker-clouds:4769",
"monitor": false,
"name": "spark-worker",
"ports": [],
"replicas": 1,
"resources": {
"cores": {
"max": 1,
"min": 1
},
"memory": {
"max": 10737418240,
"min": 6442450944
}
},
"startup_order": 1,
"total_count": 4,
"volumes": []
},
{
"command": null,
"environment": [
[
"SPARK_MASTER",
"spark://{dns_name#spark-master0}:7077"
],
[
"SPARK_EXECUTOR_RAM",
"9125756416"
],
[
"SPARK_DRIVER_RAM",
"2147483648"
],
[
"HADOOP_USER_NAME",
"{user_name}"
],
[
"NB_USER",
"{user_name}"
],
[
"PYTHONHASHSEED",
"42"
],
[
"NAMENODE_HOST",
"hdfs-namenode.zoe"
]
],
"essential_count": 1,
"image": "zapps/spark2-jupyter-notebook-clouds:4769",
"monitor": true,
"name": "spark-jupyter",
"ports": [
{
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 2,
"min": 0.5
},
"memory": {
"max": 8589934592,
"min": 6442450944
}
},
"startup_order": 0,
"total_count": 1,
"volumes": []
}
],
"size": 648,
"version": 3,
"will_end": false
}
\ No newline at end of file
{
"version": 1,
"zapps": [
{
"category": "Teaching and labs",
"name": "Clouds Lab",
"description": "clouds-lab-zapp.json",
"readable_descr": "README-clouds.md",
"parameters": [],
"guest_access": true
}
]
}
{
"name": "r-notebook",
"services": [
{
"command": null,
"environment": [
[
"NB_UID",
"1000"
],
[
"HOME",
"/mnt/workspace"
]
],
"essential_count": 1,
"image": "jupyter/r-notebook:latest",
"monitor": true,
"name": "jupyter",
"ports": [
{
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
},
"memory": {
"max": 4294967296,
"min": 4294967296
}
},
"startup_order": 0,
"total_count": 1,
"volumes": [],
"work_dir": "/mnt/workspace"
}
],
"size": 512,
"version": 3,
"will_end": false
}
\ No newline at end of file
# Microsoft Machine Learning for Apache Spark ZApp
Unmodified [Microsoft MLS](https://github.com/Azure/mmlspark) as generated by Microsoft.
The image used contains a Jupyter Notebook.
Please note that you need to get the Notebook key from the service logs to be able to access the Notebook.
# Jupyter Notebook with PyTorch
URL: [https://hub.docker.com/r/jupyter/scipy-notebook/](https://hub.docker.com/r/jupyter/scipy-notebook/) and [http://pytorch.org/](http://pytorch.org/)
* Jupyter Notebook 5.0.x
* Conda Python 3.x environment
* pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodels, cloudpickle, dill, numba, bokeh, vincent, beautifulsoup, xlrd pre-installed
* PyTorch
Please note that you need to retrieve the secret key from the service logs to be able to access the notebooks.
{
"version": 1,
"zapps": [
{
"category": "Machine learning",
"readable_descr": "README-pytorch.md",
"name": "PyTorch notebook",
"description": "pytorch-notebook.json",
"parameters": [],
"logo": "pytorch.png"
}
]
}
{
"name": "pytorch-notebook",
"services": [
{
"command": null,
"environment": [
[
"NB_UID",
"1000"
],
[
"HOME",
"/mnt/workspace"
]
],
"essential_count": 1,
"image": "zapps/pytorch:latest",
"monitor": true,
"name": "jupyter",
"ports": [
{
"name": "Jupyter Notebook interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
},
"memory": {
"max": 4294967296,
"min": 4294967296
}
},
"startup_order": 0,
"total_count": 1,
"volumes": [],
"work_dir": "/mnt/workspace"
}
],
"size": 512,
"version": 3,
"will_end": false
}
\ No newline at end of file
# Tensorflow ZApp
Unmodified [Google TensorFlow](https://www.tensorflow.org/) as generated by Google.
The image used contains TensorFlow 1.3 for Python 3. It can be used to run batch jobs by specifying a python or shell script to run from the workspace.
# Tensorflow ZApp
Unmodified [Google TensorFlow](https://www.tensorflow.org/) as generated by Google.
The image used contains TensorFlow 1.3 for Python 3 and a Jupyter Notebook.
Please note that you need to get the Notebook key from the service logs to be able to access the Notebook.
{
"version": 1,
"zapps": [
{
"category": "Machine learning",
"name": "Google TensorFlow notebook with GPU",
"description": "tf-google-gpu.json",
"readable_descr": "README-goog.md",
"parameters": []
},
{
"category": "Non-interactive",
"name": "Google TensorFlow batch with GPU",
"description": "tf-google-gpu.json",
"readable_descr": "README-batch.md",
"parameters": [
{
"kind": "command",
"name": "tf-jupyter",
"readable_name": "Command",
"description": "The Python script to run, relative to the workspace directory",
"type": "string",
"default": "./my-tf-app/main.py"
}
]
}
]
}
{
"name": "tf-google-gpu",
"services": [
{
"command": null,
"environment": [
[
"NVIDIA_VISIBLE_DEVICES",
"all"
]
],
"essential_count": 1,
"image": "gcr.io/tensorflow/tensorflow:1.3.0-gpu-py3",
"labels": [
"gpu"
],
"monitor": true,
"name": "tf-jupyter",
"ports": [
{
"name": "Tensorboard web interface",
"port_number": 6006,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
},
{
"name": "Notebook web interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
},
"memory": {
"max": 34359738368,
"min": 34359738368
}
},
"startup_order": 0,
"total_count": 1,
"volumes": []
}
],
"size": 512,
"version": 3,
"will_end": false
}
\ No newline at end of file
# Tensorflow Magenta
Unmodified [TensorFlow Magenta](https://magenta.tensorflow.org) as generated by Google.
Please note that you need to get the Notebook key from the service logs to be able to access the Notebook.
{
"name": "mag-google",
"services": [
{
"command": null,
"environment": [],
"essential_count": 1,
"image": "tensorflow/magenta",
"monitor": true,
"name": "tf-jupyter",
"ports": [
{
"name": "Tensorboard web interface",
"port_number": 6006,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
},
{
"name": "Notebook web interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
},
"memory": {
"max": 34359738368,
"min": 34359738368
}
},
"startup_order": 0,
"total_count": 1,
"volumes": []
}
],
"size": 512,
"version": 3,
"will_end": false
}
\ No newline at end of file
{
"version": 1,
"zapps": [
{
"category": "Tutorials and examples",
"name": "Magenta model",
"description": "mag-google.json",
"readable_descr": "README-magenta.md",
"parameters": []
}
]
}
# SyntaxNet TensorFlow
URL: [https://github.com/tensorflow/models/tree/master/syntaxnet](https://github.com/tensorflow/models/tree/master/syntaxnet)
DRAGNN (Dynamic Recurrent Acyclic Graphical Neural Networks) is a framework for building multi-task, dynamically connected computation graphs.
In practice, it uses SyntaxNet as its backend to perform several NLP tasks in a joint model (publicly released models include segmentation + POS tagging + syntactic parsing).
Please note that you need to get the Notebook key from the service logs to be able to access the Notebook.
{
"version": 1,
"zapps": [
{
"category": "Tutorials and examples",
"name": "DRAGNN SyntaxNet model",
"description": "stnet-google.json",
"readable_descr": "README-syntaxnet.md",
"parameters": []
}
]
}
{
"name": "stnet-google",
"services": [
{
"command": null,
"environment": [],
"essential_count": 1,
"image": "tensorflow/syntaxnet",
"monitor": true,
"name": "tf-jupyter",
"ports": [
{
"name": "Tensorboard web interface",
"port_number": 6006,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
},
{
"name": "Notebook web interface",
"port_number": 8888,
"protocol": "tcp",
"url_template": "http://{ip_port}/"
}
],
"replicas": 1,
"resources": {
"cores": {
"max": 4,
"min": 4
},
"memory": {
"max": 34359738368,
"min": 34359738368
}
},
"startup_order": 0,
"total_count": 1,
"volumes": []
}
],
"size": 512,
"version": 3,
"will_end": false
}
\ No newline at end of file
# Jupyter Notebook Data Science Stack
URL: [https://hub.docker.com/r/jupyter/datascience-notebook/](https://hub.docker.com/r/jupyter/datascience-notebook/)
* Jupyter Notebook 5.0.x
* Conda Python 3.x environment
* pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodels, cloudpickle, dill, numba, bokeh pre-installed
* Conda R v3.3.x and channel
* plyr, devtools, shiny, rmarkdown, forecast, rsqlite, reshape2, nycflights13, caret, rcurl, and randomforest pre-installed
* The tidyverse R packages are also installed, including ggplot2, dplyr, tidyr, readr, purrr, tibble, stringr, lubridate, and broom
* Julia v0.5.x with Gadfly, RDatasets and HDF5 pre-installed
Please note that you need to retrieve the secret key from the service logs to be able to access the notebooks.
# Jupyter Notebook with PyTorch
URL: [https://hub.docker.com/r/jupyter/scipy-notebook/](https://hub.docker.com/r/jupyter/scipy-notebook/) and [http://pytorch.org/](http://pytorch.org/)
* Jupyter Notebook 5.0.x
* Conda Python 3.x environment
* pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodels, cloudpickle, dill, numba, bokeh, vincent, beautifulsoup, xlrd pre-installed
* PyTorch
Please note that you need to retrieve the secret key from the service logs to be able to access the notebooks.
# Jupyter Notebook R Stack
URL: [https://hub.docker.com/r/jupyter/r-notebook/](https://hub.docker.com/r/jupyter/r-notebook/)
* Jupyter Notebook 5.0.x
* Conda R v3.3.x and channel
* plyr, devtools, shiny, rmarkdown, forecast, rsqlite, reshape2, nycflights13, caret, rcurl, and randomforest pre-installed
* The tidyverse R packages are also installed, including ggplot2, dplyr, tidyr, readr, purrr, tibble, stringr, lubridate, and broom
Please note that you need to retrieve the secret key from the service logs to be able to access the notebooks.
# Jupyter Notebook Scientific Python Stack
URL: [https://hub.docker.com/r/jupyter/scipy-notebook/](https://hub.docker.com/r/jupyter/scipy-notebook/)
* Jupyter Notebook 5.0.x
* Conda Python 3.x environment
* pandas, matplotlib, scipy, seaborn, scikit-learn, scikit-image, sympy, cython, patsy, statsmodels, cloudpickle, dill, numba, bokeh, vincent, beautifulsoup, xlrd pre-installed
Please note that you need to retrieve the secret key from the service logs to be able to access the notebooks.
# Jupyter Notebook Scientific Python Stack + Tensorflow
Maintainer: Daniele Venzano <daniele.venzano@eurecom.fr>
URL: [https://hub.docker.com/r/jupyter/tensorflow-notebook/](https://hub.docker.com/r/jupyter/tensorflow-notebook/)
* Everything in [Scipy](https://github.com/jupyter/docker-stacks/tree/master/scipy-notebook) Notebook
* Tensorflow and Keras for Python 3.x (without GPU support)
Please note that you need to retrieve the secret key from the service logs to be able to access the notebooks.
{
"name": "datasci-notebook",
"services": [
{
"command": null,
"environment": [
[
"NB_UID",
"1000"
],
[
"HOME",
"/mnt/workspace"
]