Commit 3f71cbb0 authored by Daniele Venzano's avatar Daniele Venzano

Clean-up non-functional log streaming

parent 3ef53670
......@@ -5,16 +5,13 @@ Container logs
By default Zoe does not involve itself with the output from container processes. The logs can be retrieved with the usual Docker command ``docker logs`` while a container is alive, they are lost forever when the container is deleted. This solution however does not scale very well: to examine logs, users need to have access to the docker commandline tools and to the Swarm they are running in.
To set up a more convenient logging solution, Zoe provides two alternatives:
1. Using the ``gelf-address`` option, Zoe can configure Docker to send the container outputs to an external destination in GELF format. GELF is the richest format supported by Docker and can be ingested by a number of tools such as Graylog and Logstash. When that option is set all containers created by Zoe will send their output (standard output and standard error) to the destination specified. Docker is instructed to add all Zoe-defined tags to the GELF messages, so that they can be aggregated by Zoe execution, Zoe user, etc. A popular logging stack that supports GELF is `ELK <https://www.elastic.co/products>`_.
2. Using the ``service-log-path`` option: logs will be stored in the directory specified when the execution terminates. The directory can be exposed via HTTP or NFS to give access to users. On the other hand, if the logs are too big, Zoe will spend a large amount of time saving the data, and resources will not be freed until the copying process has finished.
To setup a more convenient logging solution, Zoe provides the ``gelf-address`` option. With it, Zoe can configure Docker to send the container outputs to an external destination in GELF format. GELF is the richest format supported by Docker and can be ingested by a number of tools such as Graylog and Logstash. When that option is set all containers created by Zoe will send their output (standard output and standard error) to the destination specified. Docker is instructed to add all Zoe-defined tags to the GELF messages, so that they can be aggregated by Zoe execution, Zoe user, etc. A popular logging stack that supports GELF is `ELK <https://www.elastic.co/products>`_.
In our experience, web interfaces like Kibana or Graylog are not useful to the Zoe users: they want to quickly dig through logs of their executions to find an error or an interesting number to correlate to some other number in some other log. The web interfaces (option 1) are slow and cluttered compared to using grep on a text file (option 2).
Which alternative is good for you depends on the usage pattern of your users, your log auditing requirements, etc.
Optional Kafka support
----------------------
What if you want your logs to go through Kafka
----------------------------------------------
Zoe also provides a Zoe Logger process, in case you prefer to use Kafka in your log pipeline. Each container output will be sent to its own topic, that Kafka will retain for seven days by default. With Kafka you can also monitor the container output in real-time, for example to debug your container images running in Zoe. In this case GELF is converted to syslog-like format for easier handling.
......
......@@ -35,21 +35,6 @@ from zoe_lib.executions import ZoeExecutionsAPI
from zoe_lib.applications import app_validate
def _log_stream_stdout(service_id, timestamps):
    """Print the log stream of a service to standard output.

    :param service_id: id of the service whose logs to print
    :param timestamps: when True, prefix every line with its timestamp
    :return: 'interrupt' if the user pressed CTRL-C while streaming,
             'stream_end' when the log stream terminated on its own
    """
    api = ZoeServiceAPI(utils.zoe_url(), utils.zoe_user(), utils.zoe_pass())
    try:
        for entry in api.get_logs(service_id):
            # entry is a [timestamp, message] pair produced by the API client
            if timestamps:
                print(entry[0], entry[1])
            else:
                print(entry[1])
    except KeyboardInterrupt:
        print('CTRL-C detected, exiting...')
        return 'interrupt'
    return 'stream_end'
def info_cmd(args_):
"""Queries the info endpoint."""
info_api = ZoeInfoAPI(utils.zoe_url(), utils.zoe_user(), utils.zoe_pass())
......@@ -109,23 +94,6 @@ def exec_start_cmd(args):
if current_status == 'running':
break
time.sleep(1)
monitor_service_id = None
service_api = ZoeServiceAPI(utils.zoe_url(), utils.zoe_user(), utils.zoe_pass())
for service_id in execution['services']:
service = service_api.get(service_id)
if service['description']['monitor']:
monitor_service_id = service['id']
break
print('\n>------ start of log streaming -------<\n')
why_stop = _log_stream_stdout(monitor_service_id, False)
print('\n>------ end of log streaming -------<\n')
if why_stop == 'stream_end':
print('Execution finished')
exit(0)
elif why_stop == 'interrupt':
print('Do not worry, your execution ({}) is still running.'.format(exec_id))
exit(1)
def exec_get_cmd(args):
......@@ -179,11 +147,6 @@ def exec_rm_cmd(args):
exec_api.delete(args.id)
def logs_cmd(args):
    """Retrieves and streams the logs of a service."""
    service_id = args.service_id
    show_timestamps = args.timestamps
    _log_stream_stdout(service_id, show_timestamps)
def stats_cmd(args_):
"""Prints statistics on Zoe internals."""
stats_api = ZoeStatisticsAPI(utils.zoe_url(), utils.zoe_user(), utils.zoe_pass())
......@@ -212,7 +175,7 @@ def process_arguments() -> Tuple[ArgumentParser, Namespace]:
argparser_app_validate.set_defaults(func=app_validate_cmd)
argparser_exec_start = subparser.add_parser('start', help="Start an application")
argparser_exec_start.add_argument('-s', '--synchronous', action='store_true', help="Do not detach, wait for execution to finish, print main service log")
argparser_exec_start.add_argument('-s', '--synchronous', action='store_true', help="Do not detach immediately, wait for execution to start before exiting")
argparser_exec_start.add_argument('name', help="Name of the execution")
argparser_exec_start.add_argument('jsonfile', type=FileType("r"), help='Application description')
argparser_exec_start.set_defaults(func=exec_start_cmd)
......@@ -236,11 +199,6 @@ def process_arguments() -> Tuple[ArgumentParser, Namespace]:
argparser_execution_kill.add_argument('id', type=int, help="Execution id")
argparser_execution_kill.set_defaults(func=exec_rm_cmd)
argparser_logs = subparser.add_parser('logs', help="Streams the service logs")
argparser_logs.add_argument('service_id', type=int, help="Service id")
argparser_logs.add_argument('-t', '--timestamps', action='store_true', help="Prefix timestamps for each line")
argparser_logs.set_defaults(func=logs_cmd)
argparser_stats = subparser.add_parser('stats', help="Prints all available statistics")
argparser_stats.set_defaults(func=stats_cmd)
......
......@@ -45,20 +45,3 @@ class ZoeServiceAPI(ZoeAPIBase):
raise ZoeAPIException('service "{}" not found'.format(container_id))
else:
raise ZoeAPIException('error retrieving service {}'.format(container_id))
def get_logs(self, container_id):
    """Stream the logs of a service, one line at a time.

    Each raw line is decoded and split once on the first space, yielding
    [timestamp, message] pairs (or a single-element list when the line
    contains no space).

    :param container_id: id of the service whose logs to retrieve
    :return: generator of split log lines
    :raises ZoeAPIException: when the service is unknown or the request fails
    """
    response, status_code = self._rest_get_stream('/service/logs/' + str(container_id))
    if status_code == 404:
        raise ZoeAPIException('service "{}" not found'.format(container_id))
    if status_code != 200:
        raise ZoeAPIException('error retrieving service {}'.format(container_id))
    for raw_line in response.iter_lines():
        yield raw_line.decode('utf-8').split(' ', 1)
......@@ -16,5 +16,5 @@
"""Versions."""
ZOE_VERSION = '0.20.0-beta'
ZOE_API_VERSION = '0.6'
ZOE_API_VERSION = '0.7'
ZOE_APPLICATION_FORMAT_VERSION = 2
......@@ -384,17 +384,3 @@ class SwarmClient:
'status': cont_info['State']
})
return conts
def logs(self, docker_id: str, stream: bool, follow=None):
    """Retrieve the stdout+stderr logs of a container, with timestamps.

    :param docker_id: id of the container whose logs to fetch
    :param stream: if True, ask the Docker client for a streaming response
    :param follow: if True, keep the stream open and follow new output
    :return: whatever the Docker client returns for the logs call, or None
             when Docker rejects the id as a null resource
    """
    options = {
        'stdout': True,
        'stderr': True,
        'follow': follow,
        'stream': stream,
        'timestamps': True,
    }
    try:
        return self.cli.logs(docker_id, **options)
    except docker.errors.NullResource:
        # An empty/None container id — report "no logs" instead of raising.
        return None
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment