Improve parsing of warnings and errors (#811)
* Add logfmt message parser.
* Parse logfmt formatted warnings.
* Follow-up for #810.
* Fix handling of warning and error messages.
* Make Python 2 compatible.
* Linting. Improving tests.
Commit 6aea7efed9 (parent 37e28b62d3)
Modified changelog fragment (file name not shown in this view):

@@ -1,4 +1,5 @@
 bugfixes:
-  - "docker_compose_v2 - do not fail when non-fatal errors occur. This can happen when pulling an image fails, but then the image can be built
-    for another service. Docker Compose emits an error in that case, but ``docker compose up`` still completes successfully
-    (https://github.com/ansible-collections/community.docker/issues/807, https://github.com/ansible-collections/community.docker/pull/810)."
+  - "docker_compose_v2 - do not fail when non-fatal errors occur. This can happen when pulling an image fails, but then the image can be built
+    for another service. Docker Compose emits an error in that case, but ``docker compose up`` still completes successfully
+    (https://github.com/ansible-collections/community.docker/issues/807, https://github.com/ansible-collections/community.docker/pull/810,
+    https://github.com/ansible-collections/community.docker/pull/811)."
changelogs/fragments/811-compose-v2-logfmt.yml (new file, 3 lines):

bugfixes:
  - "docker_compose_v2* modules - parse ``logfmt`` warnings emitted by Docker Compose (https://github.com/ansible-collections/community.docker/issues/787, https://github.com/ansible-collections/community.docker/pull/811)."
  - "docker_compose_v2* modules - correctly parse ``Warning`` events emitted by Docker Compose (https://github.com/ansible-collections/community.docker/issues/807, https://github.com/ansible-collections/community.docker/pull/811)."
plugins/module_utils/_logfmt.py (new file, 208 lines):

# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""
Parse go logfmt messages.

See https://pkg.go.dev/github.com/kr/logfmt?utm_source=godoc for information on the format.
"""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


# The format is defined in https://pkg.go.dev/github.com/kr/logfmt?utm_source=godoc
# (look for "EBNFish")


class InvalidLogFmt(Exception):
    pass


class _Mode(object):
    # Parser states used by parse_line() below.
    GARBAGE = 0
    KEY = 1
    EQUAL = 2
    IDENT_VALUE = 3
    QUOTED_VALUE = 4


# Single-character escape sequences allowed in quoted values.
_ESCAPE_DICT = {
    '"': '"',
    '\\': '\\',
    "'": "'",
    '/': '/',
    'b': '\b',
    'f': '\f',
    'n': '\n',
    'r': '\r',
    't': '\t',
}

_HEX_DICT = {
    '0': 0,
    '1': 1,
    '2': 2,
    '3': 3,
    '4': 4,
    '5': 5,
    '6': 6,
    '7': 7,
    '8': 8,
    '9': 9,
    'a': 0xA,
    'b': 0xB,
    'c': 0xC,
    'd': 0xD,
    'e': 0xE,
    'f': 0xF,
    'A': 0xA,
    'B': 0xB,
    'C': 0xC,
    'D': 0xD,
    'E': 0xE,
    'F': 0xF,
}


def _is_ident(cur):
    # Characters allowed in unquoted keys and values.
    return cur > ' ' and cur not in ('"', '=')


class _Parser(object):
    def __init__(self, line):
        self.line = line
        self.index = 0
        self.length = len(line)

    def done(self):
        return self.index >= self.length

    def cur(self):
        return self.line[self.index]

    def next(self):
        self.index += 1

    def prev(self):
        self.index -= 1

    def parse_unicode_sequence(self):
        # Decode a \uXXXX escape; self.index points at the backslash.
        if self.index + 6 > self.length:
            raise InvalidLogFmt('Not enough space for unicode escape')
        if self.line[self.index:self.index + 2] != '\\u':
            raise InvalidLogFmt('Invalid unicode escape start')
        v = 0
        for i in range(self.index + 2, self.index + 6):
            v <<= 4
            try:
                v += _HEX_DICT[self.line[i]]
            except KeyError:
                raise InvalidLogFmt('Invalid unicode escape digit {digit!r}'.format(digit=self.line[i]))
        self.index += 6
        return chr(v)
def parse_line(line, logrus_mode=False):
    result = {}
    parser = _Parser(line)
    key = []
    value = []
    mode = _Mode.GARBAGE

    def handle_kv(has_no_value=False):
        # Commit the collected key (and value) and reset the buffers.
        k = ''.join(key)
        v = None if has_no_value else ''.join(value)
        result[k] = v
        del key[:]
        del value[:]

    def parse_garbage(cur):
        if _is_ident(cur):
            return _Mode.KEY
        parser.next()
        return _Mode.GARBAGE

    def parse_key(cur):
        if _is_ident(cur):
            key.append(cur)
            parser.next()
            return _Mode.KEY
        elif cur == '=':
            parser.next()
            return _Mode.EQUAL
        else:
            if logrus_mode:
                raise InvalidLogFmt('Key must always be followed by "=" in logrus mode')
            handle_kv(has_no_value=True)
            parser.next()
            return _Mode.GARBAGE

    def parse_equal(cur):
        if _is_ident(cur):
            value.append(cur)
            parser.next()
            return _Mode.IDENT_VALUE
        elif cur == '"':
            parser.next()
            return _Mode.QUOTED_VALUE
        else:
            handle_kv()
            parser.next()
            return _Mode.GARBAGE

    def parse_ident_value(cur):
        if _is_ident(cur):
            value.append(cur)
            parser.next()
            return _Mode.IDENT_VALUE
        else:
            handle_kv()
            parser.next()
            return _Mode.GARBAGE

    def parse_quoted_value(cur):
        if cur == '\\':
            parser.next()
            if parser.done():
                raise InvalidLogFmt('Unterminated escape sequence in quoted string')
            cur = parser.cur()
            if cur in _ESCAPE_DICT:
                value.append(_ESCAPE_DICT[cur])
            elif cur != 'u':
                raise InvalidLogFmt('Unknown escape sequence {seq!r}'.format(seq='\\' + cur))
            else:
                parser.prev()
                value.append(parser.parse_unicode_sequence())
            parser.next()
            return _Mode.QUOTED_VALUE
        elif cur == '"':
            handle_kv()
            parser.next()
            return _Mode.GARBAGE
        elif cur < ' ':
            raise InvalidLogFmt('Control characters in quoted string are not allowed')
        else:
            value.append(cur)
            parser.next()
            return _Mode.QUOTED_VALUE

    # One handler per mode; each returns the next mode.
    parsers = {
        _Mode.GARBAGE: parse_garbage,
        _Mode.KEY: parse_key,
        _Mode.EQUAL: parse_equal,
        _Mode.IDENT_VALUE: parse_ident_value,
        _Mode.QUOTED_VALUE: parse_quoted_value,
    }
    while not parser.done():
        mode = parsers[mode](parser.cur())
    # Handle whatever was still being collected when the line ended.
    if mode == _Mode.KEY and logrus_mode:
        raise InvalidLogFmt('Key must always be followed by "=" in logrus mode')
    if mode == _Mode.KEY or mode == _Mode.EQUAL:
        handle_kv(has_no_value=True)
    elif mode == _Mode.IDENT_VALUE:
        handle_kv()
    elif mode == _Mode.QUOTED_VALUE:
        raise InvalidLogFmt('Unterminated quoted string')
    return result
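A minimal usage sketch for the parser above (an illustration, not part of the diff); input and expected output are copied from the unit tests added later in this commit, and the import assumes the collection is installed so the module_utils path resolves:

from ansible_collections.community.docker.plugins.module_utils._logfmt import parse_line

line = 'foo=bar a=14 baz="hello kitty" cool%story=bro f %^asdf'
print(parse_line(line))
# {'foo': 'bar', 'a': '14', 'baz': 'hello kitty', 'cool%story': 'bro', 'f': None, '%^asdf': None}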
Changes to the Compose v2 module utils (file name not shown in this view):

@@ -16,6 +16,10 @@ from ansible.module_utils.six.moves import shlex_quote
 
 from ansible_collections.community.docker.plugins.module_utils.util import DockerBaseClass
 from ansible_collections.community.docker.plugins.module_utils.version import LooseVersion
+from ansible_collections.community.docker.plugins.module_utils._logfmt import (
+    InvalidLogFmt as _InvalidLogFmt,
+    parse_line as _parse_logfmt_line,
+)
 
 
 DOCKER_COMPOSE_FILES = ('compose.yaml', 'compose.yml', 'docker-compose.yaml', 'docker-compose.yml')

@@ -56,6 +60,9 @@ DOCKER_STATUS_PULL = frozenset((
 DOCKER_STATUS_ERROR = frozenset((
     'Error',
 ))
+DOCKER_STATUS_WARNING = frozenset((
+    'Warning',
+))
 DOCKER_STATUS_WAITING = frozenset((
     'Waiting',
 ))

@@ -145,10 +152,23 @@ _RE_ERROR_EVENT = re.compile(
     r'\s+'
     r'(?P<status>%s)'
     r'\s*'
+    r'(?P<msg>\S.*\S)?'
     r'$'
     % '|'.join(re.escape(status) for status in DOCKER_STATUS_ERROR)
 )
 
+_RE_WARNING_EVENT = re.compile(
+    r'^'
+    r'\s*'
+    r'(?P<resource_id>\S+)'
+    r'\s+'
+    r'(?P<status>%s)'
+    r'\s*'
+    r'(?P<msg>\S.*\S)?'
+    r'$'
+    % '|'.join(re.escape(status) for status in DOCKER_STATUS_WARNING)
+)
+
 _RE_CONTINUE_EVENT = re.compile(
     r'^'
     r'\s*'
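A standalone sketch (an illustration, not part of the diff) of what the new warning regex matches. The pattern is the one added above with ``DOCKER_STATUS_WARNING`` inlined to just ``Warning``; the sample lines come from the new ``2.20.3-image-warning-error`` test case and are stripped by hand for this demo:

import re

_RE_WARNING_EVENT = re.compile(r'^\s*(?P<resource_id>\S+)\s+(?P<status>Warning)\s*(?P<msg>\S.*\S)?$')

m = _RE_WARNING_EVENT.match(' dummy4 Warning Foo bar '.strip())
print(m.group('resource_id'), m.group('msg'))   # dummy4 Foo bar

m = _RE_WARNING_EVENT.match(' dummy3 Warning '.strip())
print(m.group('resource_id'), m.group('msg'))   # dummy3 None -> reported as an unspecified warning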
@@ -193,7 +213,7 @@ _RE_BUILD_PROGRESS_EVENT = re.compile(
 MINIMUM_COMPOSE_VERSION = '2.18.0'
 
 
-def _extract_event(line):
+def _extract_event(line, warn_function=None):
     match = _RE_RESOURCE_EVENT.match(line)
     if match is not None:
         status = match.group('status')

@@ -205,7 +225,7 @@ def _extract_event(line):
             match.group('resource_id'),
             status,
             msg,
-        )
+        ), True
     match = _RE_PULL_EVENT.match(line)
     if match:
         return Event(

@@ -213,15 +233,24 @@ def _extract_event(line):
             match.group('service'),
             match.group('status'),
             None,
-        )
+        ), True
     match = _RE_ERROR_EVENT.match(line)
     if match:
         return Event(
             ResourceType.UNKNOWN,
             match.group('resource_id'),
             match.group('status'),
-            None,
-        )
+            match.group('msg') or None,
+        ), True
+    match = _RE_WARNING_EVENT.match(line)
+    if match:
+        if warn_function:
+            if match.group('msg'):
+                msg = '{rid}: {msg}'
+            else:
+                msg = 'Unspecified warning for {rid}'
+            warn_function(msg.format(rid=match.group('resource_id'), msg=match.group('msg')))
+        return None, True
     match = _RE_PULL_PROGRESS.match(line)
     if match:
         return Event(

@@ -229,7 +258,7 @@ def _extract_event(line):
             match.group('layer'),
             match.group('status'),
             None,
-        )
+        ), True
     match = _RE_SKIPPED_EVENT.match(line)
     if match:
         return Event(

@@ -237,7 +266,7 @@ def _extract_event(line):
             match.group('resource_id'),
             'Skipped',
             match.group('msg'),
-        )
+        ), True
     match = _RE_BUILD_START_EVENT.match(line)
     if match:
         return Event(

@@ -245,8 +274,23 @@ def _extract_event(line):
             match.group('resource_id'),
             'Building',
             None,
-        )
-    return None
+        ), True
+    return None, False
+
+
+def _extract_logfmt_event(line, warn_function=None):
+    try:
+        result = _parse_logfmt_line(line, logrus_mode=True)
+    except _InvalidLogFmt:
+        return None, False
+    if 'time' not in result or 'level' not in result or 'msg' not in result:
+        return None, False
+    if result['level'] == 'warning':
+        if warn_function:
+            warn_function(result['msg'])
+        return None, True
+    # TODO: no idea what to do with this
+    return None, False
 
 
 def _warn_missing_dry_run_prefix(line, warn_missing_dry_run_prefix, warn_function):
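A sketch (an illustration, not part of the diff) of the gate ``_extract_logfmt_event`` applies, driving the logfmt parser directly. The sample message text is made up for the demo, and the import assumes the collection is installed:

from ansible_collections.community.docker.plugins.module_utils._logfmt import InvalidLogFmt, parse_line

line = 'time="2024-02-02T08:14:10+01:00" level=warning msg="something looks off"'
try:
    result = parse_line(line, logrus_mode=True)
except InvalidLogFmt:
    result = None  # not logfmt at all -> the line falls through to the other handlers

if result and {'time', 'level', 'msg'} <= set(result) and result['level'] == 'warning':
    print('warning:', result['msg'])  # warning: something looks off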
@@ -303,7 +347,7 @@ def parse_events(stderr, dry_run=False, warn_function=None):
                 line = line[len(_DRY_RUN_MARKER):].lstrip()
             else:
                 warn_missing_dry_run_prefix = True
-        event = _extract_event(line)
+        event, parsed = _extract_event(line, warn_function=warn_function)
         if event is not None:
             events.append(event)
             if event.status in DOCKER_STATUS_ERROR:

@@ -312,6 +356,8 @@ def parse_events(stderr, dry_run=False, warn_function=None):
                 error_event = None
             _warn_missing_dry_run_prefix(line, warn_missing_dry_run_prefix, warn_function)
             continue
+        elif parsed:
+            continue
         match = _RE_BUILD_PROGRESS_EVENT.match(line)
         if match:
             # Ignore this

@@ -323,6 +369,11 @@ def parse_events(stderr, dry_run=False, warn_function=None):
             if index_event is not None:
                 index, event = index_event
                 events[-1] = _concat_event_msg(event, match.group('msg'))
+        event, parsed = _extract_logfmt_event(line, warn_function=warn_function)
+        if event is not None:
+            events.append(event)
+        elif parsed:
+            continue
         if error_event is not None:
             # Unparsable line that apparently belongs to the previous error event
             events[-1] = _concat_event_msg(error_event, line)

@@ -397,9 +448,6 @@ def emit_warnings(events, warn_function):
 def is_failed(events, rc):
     if rc:
         return True
-    for event in events:
-        if event.status in DOCKER_STATUS_ERROR:
-            return True
     return False
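An end-to-end sketch (an illustration, not part of the diff) of the new behaviour, fed with a subset of the stderr from the new ``2.20.3-image-warning-error`` test case. The import path assumes these helpers live in the collection's ``compose_v2`` module utils, which is not named in this diff view:

from ansible_collections.community.docker.plugins.module_utils.compose_v2 import parse_events

warnings = []
stderr = (
    " dummy3 Warning \n"
    " dummy4 Warning Foo bar \n"
    " dummy5 Error Bar baz bam \n"
)
events = parse_events(stderr, dry_run=False, warn_function=warnings.append)
print(warnings)                      # ['Unspecified warning for dummy3', 'dummy4: Foo bar']
print([e.status for e in events])    # ['Error'] -- only the error line becomes an Event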
tests/unit/plugins/module_utils/test__logfmt.py (new file, 100 lines):

# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import pytest

from ansible_collections.community.docker.plugins.module_utils._logfmt import (
    InvalidLogFmt,
    parse_line,
)


SUCCESS_TEST_CASES = [
    (
        'time="2024-02-02T08:14:10+01:00" level=warning msg="a network with name influxNetwork exists but was not'
        ' created for project \\"influxdb\\".\\nSet `external: true` to use an existing network"',
        {},
        {
            'time': '2024-02-02T08:14:10+01:00',
            'level': 'warning',
            'msg': 'a network with name influxNetwork exists but was not created for project "influxdb".\nSet `external: true` to use an existing network',
        },
    ),
    (
        'time="2024-02-02T08:14:10+01:00" level=warning msg="a network with name influxNetwork exists but was not'
        ' created for project \\"influxdb\\".\\nSet `external: true` to use an existing network"',
        {'logrus_mode': True},
        {
            'time': '2024-02-02T08:14:10+01:00',
            'level': 'warning',
            'msg': 'a network with name influxNetwork exists but was not created for project "influxdb".\nSet `external: true` to use an existing network',
        },
    ),
    (
        'foo=bar a=14 baz="hello kitty" cool%story=bro f %^asdf',
        {},
        {
            'foo': 'bar',
            'a': '14',
            'baz': 'hello kitty',
            'cool%story': 'bro',
            'f': None,
            '%^asdf': None,
        },
    ),
    (
        '{"foo":"bar"}',
        {},
        {
            '{': None,
            'foo': None,
            ':': None,
            'bar': None,
            '}': None,
        },
    ),
]


FAILURE_TEST_CASES = [
    (
        'foo=bar a=14 baz="hello kitty" cool%story=bro f %^asdf',
        {'logrus_mode': True},
        'Key must always be followed by "=" in logrus mode',
    ),
    (
        '{}',
        {'logrus_mode': True},
        'Key must always be followed by "=" in logrus mode',
    ),
    (
        '[]',
        {'logrus_mode': True},
        'Key must always be followed by "=" in logrus mode',
    ),
    (
        '{"foo=bar": "baz=bam"}',
        {'logrus_mode': True},
        'Key must always be followed by "=" in logrus mode',
    ),
]


@pytest.mark.parametrize('line, kwargs, result', SUCCESS_TEST_CASES)
def test_parse_line_success(line, kwargs, result):
    res = parse_line(line, **kwargs)
    print(repr(res))
    assert res == result


@pytest.mark.parametrize('line, kwargs, message', FAILURE_TEST_CASES)
def test_parse_line_failure(line, kwargs, message):
    with pytest.raises(InvalidLogFmt) as exc:
        parse_line(line, **kwargs)

    print(repr(exc.value.args[0]))
    assert exc.value.args[0] == message
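One more sketch (an illustration, not part of the diff), showing why ``logrus_mode`` is used when scanning Compose stderr: without it, JSON-ish text still "parses" into junk keys, while in logrus mode such lines raise ``InvalidLogFmt`` and are rejected by ``_extract_logfmt_event`` instead of being misread as key/value pairs. Inputs and outputs are taken from the test cases above; the import assumes the collection is installed:

from ansible_collections.community.docker.plugins.module_utils._logfmt import InvalidLogFmt, parse_line

print(parse_line('{"foo":"bar"}'))
# {'{': None, 'foo': None, ':': None, 'bar': None, '}': None}

try:
    parse_line('{"foo=bar": "baz=bam"}', logrus_mode=True)
except InvalidLogFmt as exc:
    print(exc)  # Key must always be followed by "=" in logrus mode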
Changes to the Compose v2 parse_events unit tests (file name not shown in this view):

@@ -175,6 +175,48 @@ EXTRA_TEST_CASES = [
         ],
         [],
     ),
+    (
+        # https://github.com/ansible-collections/community.docker/issues/787
+        '2.20.3-logrus-warn',
+        '2.20.3',
+        False,
+        'time="2024-02-02T08:14:10+01:00" level=warning msg="a network with name influxNetwork exists but was not'
+        ' created for project \\"influxdb\\".\\nSet `external: true` to use an existing network"\n',
+        [],
+        [
+            'a network with name influxNetwork exists but was not created for project "influxdb".\nSet `external: true` to use an existing network',
+        ],
+    ),
+    (
+        # https://github.com/ansible-collections/community.docker/issues/807
+        '2.20.3-image-warning-error',
+        '2.20.3',
+        False,
+        " dummy3 Warning \n"
+        " dummy2 Warning \n"
+        " dummy Error \n"
+        " dummy4 Warning Foo bar \n"
+        " dummy5 Error Bar baz bam \n",
+        [
+            Event(
+                'unknown',
+                'dummy',
+                'Error',
+                None,
+            ),
+            Event(
+                'unknown',
+                'dummy5',
+                'Error',
+                'Bar baz bam',
+            ),
+        ],
+        [
+            'Unspecified warning for dummy3',
+            'Unspecified warning for dummy2',
+            'dummy4: Foo bar',
+        ],
+    ),
 ]
 
 _ALL_TEST_CASES = EVENT_TEST_CASES + EXTRA_TEST_CASES

@@ -193,5 +235,8 @@ def test_parse_events(test_id, compose_version, dry_run, stderr, events, warnings):
 
     collected_events = parse_events(stderr, dry_run=dry_run, warn_function=collect_warning)
 
-    assert events == collected_events
-    assert warnings == collected_warnings
+    print(collected_events)
+    print(collected_warnings)
+
+    assert collected_events == events
+    assert collected_warnings == warnings