From d67f65bb2350378824bb029d999d875ad4dcdc00 Mon Sep 17 00:00:00 2001 From: Joachim Metz Date: Mon, 3 Apr 2017 18:46:03 +0200 Subject: [PATCH] Code review: 312320043: Added attribute container identifiers #771 --- config/dpkg/changelog | 2 +- docs/plaso.storage.rst | 8 + plaso/analysis/interface.py | 11 +- plaso/analysis/tagging.py | 1 + plaso/containers/event_sources.py | 2 - plaso/containers/events.py | 78 ++- plaso/containers/interface.py | 107 +++- plaso/containers/reports.py | 33 +- plaso/output/event_buffer.py | 6 +- plaso/serializer/json_serializer.py | 25 +- plaso/storage/gzip_file.py | 36 +- plaso/storage/identifiers.py | 38 ++ plaso/storage/zip_file.py | 867 ++++++++++++---------------- test_data/end_to_end/dynamic.log | 44 +- test_data/end_to_end/json.log | 44 +- test_data/end_to_end/json_line.log | 44 +- test_data/end_to_end/l2tcsv.log | 44 +- test_data/end_to_end/l2ttln.log | 44 +- test_data/end_to_end/rawpy.log | 395 ++++++------- test_data/end_to_end/tln.log | 44 +- test_data/pinfo_test.json.plaso | Bin 3501 -> 3502 bytes test_data/psort_test.json.plaso | Bin 13906 -> 13175 bytes tests/containers/events.py | 4 +- tests/frontend/preg.py | 15 +- tests/multi_processing/psort.py | 15 +- tests/serializer/json_serializer.py | 10 +- tests/storage/fake_storage.py | 6 +- tests/storage/gzip_file.py | 8 +- tests/storage/test_lib.py | 35 +- tests/storage/zip_file.py | 183 +++--- tools/pinfo_test.py | 23 +- tools/psteal_test.py | 14 +- utils/pylintrc | 4 +- 33 files changed, 1069 insertions(+), 1121 deletions(-) create mode 100644 plaso/storage/identifiers.py diff --git a/config/dpkg/changelog b/config/dpkg/changelog index 1bb88f15da..f3709b30fe 100644 --- a/config/dpkg/changelog +++ b/config/dpkg/changelog @@ -2,4 +2,4 @@ plaso (1.5.2-1) unstable; urgency=low * Auto-generated - -- Log2Timeline Mon, 03 Apr 2017 17:20:47 +0200 \ No newline at end of file + -- Log2Timeline Mon, 03 Apr 2017 18:46:02 +0200 \ No newline at end of file diff --git a/docs/plaso.storage.rst b/docs/plaso.storage.rst index 756746751c..bdf4eab900 100644 --- a/docs/plaso.storage.rst +++ b/docs/plaso.storage.rst @@ -28,6 +28,14 @@ plaso.storage.gzip_file module :undoc-members: :show-inheritance: +plaso.storage.identifiers module +-------------------------------- + +.. automodule:: plaso.storage.identifiers + :members: + :undoc-members: + :show-inheritance: + plaso.storage.interface module ------------------------------ diff --git a/plaso/analysis/interface.py b/plaso/analysis/interface.py index 68ad397ae2..4709fa8f7f 100644 --- a/plaso/analysis/interface.py +++ b/plaso/analysis/interface.py @@ -13,6 +13,7 @@ else: import queue as Queue # pylint: disable=import-error +# pylint: disable=wrong-import-position import requests # Some distributions unvendor urllib3 from the requests module, and we need to @@ -69,13 +70,15 @@ def _CreateEventTag(self, event, comment, labels): comment (str): event tag comment. labels (list[str]): event tag labels. 
""" - event_uuid = getattr(event, u'uuid', None) - event_tag = events.EventTag( - comment=comment, event_uuid=event_uuid) + event_identifier = event.GetIdentifier() + + event_tag = events.EventTag(comment=comment) + event_tag.SetEventIdentifier(event_identifier) event_tag.AddLabels(labels) + event_identifier_string = event_identifier.CopyToString() logging.debug(u'Created event tag: {0:s} for event: {1:s}'.format( - comment, event_uuid)) + comment, event_identifier_string)) return event_tag diff --git a/plaso/analysis/tagging.py b/plaso/analysis/tagging.py index b258f7f18c..2b56db53a5 100644 --- a/plaso/analysis/tagging.py +++ b/plaso/analysis/tagging.py @@ -208,6 +208,7 @@ def ExamineEvent(self, mediator, event): labels = list(efilter_api.getvalues(matched_labels)) event_tag = self._CreateEventTag(event, self._EVENT_TAG_COMMENT, labels) + mediator.ProduceEventTag(event_tag) self._number_of_event_tags += 1 diff --git a/plaso/containers/event_sources.py b/plaso/containers/event_sources.py index 63d812d166..bba052411a 100644 --- a/plaso/containers/event_sources.py +++ b/plaso/containers/event_sources.py @@ -16,7 +16,6 @@ class EventSource(interface.AttributeContainer): data_type (str): attribute container type indicator. file_entry_type (str): dfVFS file entry type. path_spec (dfvfs.PathSpec): path specification. - storage_session (int): storage session number or 0 if not set. """ CONTAINER_TYPE = u'event_source' DATA_TYPE = None @@ -31,7 +30,6 @@ def __init__(self, path_spec=None): self.data_type = self.DATA_TYPE self.file_entry_type = None self.path_spec = path_spec - self.storage_session = 0 class FileEntryEventSource(EventSource): diff --git a/plaso/containers/events.py b/plaso/containers/events.py index ca381d5cc0..631f9f578d 100644 --- a/plaso/containers/events.py +++ b/plaso/containers/events.py @@ -130,8 +130,8 @@ def __eq__(self, event_object): self.data_type != event_object.data_type): return False - attribute_names = set(self.__dict__.keys()) - if attribute_names != set(event_object.__dict__.keys()): + attribute_names = set(self.GetAttributeNames()) + if attribute_names != set(event_object.GetAttributeNames()): return False # Here we have to deal with "near" duplicates, so not all attributes @@ -171,7 +171,7 @@ def EqualityString(self): str: string representation of the event object that can be used for equality comparison. """ - attribute_names = set(self.__dict__.keys()) + attribute_names = set(self.GetAttributeNames()) fields = sorted(list(attribute_names.difference(self.COMPARE_EXCLUDE))) # TODO: Review this (after 1.1.0 release). Is there a better/more clean @@ -226,10 +226,11 @@ def GetAttributeNames(self): list[str]: attribute names. """ attribute_names = [] - for attribute_name in self.__dict__.keys(): - attribute_value = getattr(self, attribute_name, None) - if attribute_value is not None: - attribute_names.append(attribute_name) + for attribute_name, attribute_value in self.GetAttributes(): + if attribute_value is None: + continue + + attribute_names.append(attribute_name) return attribute_names @@ -237,22 +238,19 @@ def GetAttributeNames(self): class EventTag(interface.AttributeContainer): """Class to represent an event tag attribute container. - The event tag either needs to have an event_uuid defined or both - the store_number and store_index to be valid. If both defined - the store_number and store_index is preferred. - Attributes: comment (str): comments. 
+ event_entry_index (int): serialized data stream entry index of the event, + this attribute is used by the ZIP and GZIP storage files to + uniquely identify the event linked to the tag. + event_stream_number (int): number of the serialized event stream, this + attribute is used by the ZIP and GZIP storage files to uniquely + identify the event linked to the tag. event_uuid (str): event identifier (UUID). labels (list[str]): labels, such as "malware", "application_execution". - store_index (int): store index of the corresponding event. - store_number (int): store number of the corresponding event. """ CONTAINER_TYPE = u'event_tag' - _ATTRIBUTE_NAMES = frozenset([ - u'comment', u'event_uuid', u'labels', u'store_index', u'store_number']) - _INVALID_LABEL_CHARACTERS_REGEX = re.compile(r'[^A-Za-z0-9_]') _VALID_LABEL_REGEX = re.compile(r'^[A-Za-z0-9_]+$') @@ -265,20 +263,12 @@ def __init__(self, comment=None, event_uuid=None): event_uuid (Optional[str]): event identifier (UUID). """ super(EventTag, self).__init__() + self._event_identifier = None self.comment = comment + self.event_entry_index = None + self.event_stream_number = None self.event_uuid = event_uuid self.labels = [] - # TODO: deprecate store number and index. - self.store_index = None - self.store_number = None - - @property - def string_key(self): - """str: string index key for this tag.""" - if self.event_uuid is not None: - return self.event_uuid - - return u'{0:d}:{1:d}'.format(self.store_number, self.store_index) def AddComment(self, comment): """Adds a comment to the event tag. @@ -342,13 +332,6 @@ def CopyToDict(self): result_dict = { u'labels': self.labels } - if (self.store_number is not None and self.store_index is not None and - self.store_number > -1 and self.store_index > -1): - result_dict[u'store_number'] = self.store_number - result_dict[u'store_index'] = self.store_index - else: - result_dict[u'event_uuid'] = self.event_uuid - if self.comment: result_dict[u'comment'] = self.comment @@ -371,18 +354,27 @@ def CopyTextToLabel(cls, text, prefix=u''): text = u'{0:s}{1:s}'.format(prefix, text) return cls._INVALID_LABEL_CHARACTERS_REGEX.sub(u'_', text) - def GetAttributes(self): - """Retrieves the attributes from the event tag object. + def GetEventIdentifier(self): + """Retrieves the identifier of the event associated with the event tag. - Attributes that are set to None are ignored. + The event identifier is a storage specific value that should not + be serialized. + + Returns: + AttributeContainerIdentifier: event identifier or None when not set. + """ + return self._event_identifier - Yields: - tuple[str, str]: event tag attribute name and value. + def SetEventIdentifier(self, event_identifier): + """Sets the identifier of the event associated with the event tag. + + The event identifier is a storage specific value that should not + be serialized. + + Args: + event_identifier (AttributeContainerIdentifier): event identifier. 
""" - for attribute_name in self._ATTRIBUTE_NAMES: - attribute_value = getattr(self, attribute_name, None) - if attribute_value is not None: - yield attribute_name, attribute_value + self._event_identifier = event_identifier manager.AttributeContainersManager.RegisterAttributeContainers([ diff --git a/plaso/containers/interface.py b/plaso/containers/interface.py index d9bf3ab891..5df1464bd9 100644 --- a/plaso/containers/interface.py +++ b/plaso/containers/interface.py @@ -4,8 +4,29 @@ from efilter.protocols import structured +class AttributeContainerIdentifier(object): + """The attribute container identifier. + + The identifier is used to uniquely identify attribute containers. + The value should be unique at runtime and in storage. + """ + + def __init__(self): + """Initializes an attribute container identifier.""" + super(AttributeContainerIdentifier, self).__init__() + self._identifier = id(self) + + def CopyToString(self): + """Copies the identifier to a string representation. + + Returns: + str: unique identifier or None. + """ + return u'{0:d}'.format(self._identifier) + + class AttributeContainer(object): - """Class that defines the attribute container interface. + """The attribute container interface. This is the the base class for those object that exists primarily as a container of attributes with basic accessors and mutators. @@ -13,22 +34,41 @@ class AttributeContainer(object): The CONTAINER_TYPE class attribute contains a string that identifies the container type e.g. the container type "event" identifiers an event object. + + Attributes are public class members of an serializable type. Protected + and private class members are not to be serialized. """ CONTAINER_TYPE = None + def __init__(self): + """Initializes an attribute container.""" + super(AttributeContainer, self).__init__() + self._identifier = AttributeContainerIdentifier() + self._session_identifier = None + def CopyToDict(self): """Copies the attribute container to a dictionary. Returns: - A dictionary containing the attribute container attributes. + dict[str, object]: attribute values per name. """ - dictionary = {} + return { + attribute_name: attribute_value + for attribute_name, attribute_value in self.GetAttributes()} + + def GetAttributeNames(self): + """Retrieves the names of all attributes. + + Returns: + list[str]: attribute names. + """ + attribute_names = [] for attribute_name in iter(self.__dict__.keys()): - attribute_value = getattr(self, attribute_name, None) - if attribute_value is not None: - dictionary[attribute_name] = attribute_value + if attribute_name.startswith(u'_'): + continue + attribute_names.append(attribute_name) - return dictionary + return attribute_names def GetAttributes(self): """Retrieves the attribute names and values. @@ -36,22 +76,55 @@ def GetAttributes(self): Attributes that are set to None are ignored. Yields: - A tuple containing an attribute name and value. + tuple[str, object]: attribute name and value. """ - for attribute_name in iter(self.__dict__.keys()): - attribute_value = getattr(self, attribute_name, None) - if attribute_value is not None: - yield attribute_name, attribute_value + for attribute_name, attribute_value in iter(self.__dict__.items()): + if attribute_name.startswith(u'_') or attribute_value is None: + continue - def GetAttributeNames(self): - """Retrieves the names of all attributes. + yield attribute_name, attribute_value - Attributes that are set to None are ignored. + def GetIdentifier(self): + """Retrieves the identifier. 
+ + The identifier is a storage specific value that should not be serialized. + + Returns: + AttributeContainerIdentifier: an unique identifier for the container. + """ + return self._identifier + + def GetSessionIdentifier(self): + """Retrieves the session identifier. + + The session identifier is a storage specific value that should not + be serialized. Returns: - A list containing the attribute container attribute names. + str: session identifier. + """ + return self._session_identifier + + def SetIdentifier(self, identifier): + """Sets the identifier. + + The identifier is a storage specific value that should not be serialized. + + Args: + identifier (AttributeContainerIdentifier): identifier. + """ + self._identifier = identifier + + def SetSessionIdentifier(self, session_identifier): + """Sets the session identifier. + + The session identifier is a storage specific value that should not + be serialized. + + Args: + session_identifier (str): session identifier. """ - return [name for name, _ in list(self.GetAttributes())] + self._session_identifier = session_identifier # Efilter protocol definition to enable filtering of containers. diff --git a/plaso/containers/reports.py b/plaso/containers/reports.py index c8e27c7639..cd036698b2 100644 --- a/plaso/containers/reports.py +++ b/plaso/containers/reports.py @@ -10,15 +10,12 @@ class AnalysisReport(interface.AttributeContainer): """Class to represent an analysis report attribute container. Attributes: - filter_string: a string containing ??? - images: a list containing ??? - plugin_name: a string containing the name of the analysis plugin that - generated the report. - report_array: an array containing ??? - report_dict: a dictionary containing ??? - text: a string containing the report text or None. - time_compiled: a timestamp containing the date and time the report was - compiled. + filter_string (str): ??? + plugin_name (str): name of the analysis plugin that generated the report. + report_array (array[str]): ??? + report_dict (dict[str]): ??? + text (str): report text. + time_compiled (int): timestamp of the date and time the report was compiled. """ CONTAINER_TYPE = u'analysis_report' @@ -26,13 +23,12 @@ def __init__(self, plugin_name=None, text=None): """Initializes the analysis report. Args: - plugin_name: optional string containing the name of the analysis plugin - that generated the report. - text: optional string containing the report text. + plugin_name (Optional[str]): name of the analysis plugin that generated + the report. + text (Optional[str]): report text. """ super(AnalysisReport, self).__init__() self.filter_string = None - self.images = None self.plugin_name = plugin_name self.report_array = None self.report_dict = None @@ -44,11 +40,10 @@ def CopyToDict(self): """Copies the attribute container to a dictionary. Returns: - A dictionary containing the attribute container attributes. + dict[str, object]: attribute values per name. """ dictionary = {} - for attribute_name in iter(self.__dict__.keys()): - attribute_value = getattr(self, attribute_name, None) + for attribute_name, attribute_value in self.GetAttributes(): if attribute_value is None: continue @@ -57,13 +52,11 @@ def CopyToDict(self): return dictionary def GetString(self): - """Retrievs a string representation of the report. + """Retrieves a string representation of the report. Returns: - A string containing the report. + str: string representation of the report. 
""" - # TODO: Make this a more complete function that includes images - # and the option of saving as a full fledged HTML document. string_list = [] string_list.append(u'Report generated from: {0:s}'.format(self.plugin_name)) diff --git a/plaso/output/event_buffer.py b/plaso/output/event_buffer.py index efbccdb79f..d230b81143 100644 --- a/plaso/output/event_buffer.py +++ b/plaso/output/event_buffer.py @@ -28,7 +28,7 @@ def PopEvent(self): EventObject: event. """ try: - _, _, _, _, event = heapq.heappop(self._heap) + _, _, _, _, _, event = heapq.heappop(self._heap) return event except IndexError: @@ -44,8 +44,10 @@ def PushEvent(self, event): # Replace them by event specific attributes relevant to sorting. store_number = getattr(event, u'_store_number', None) store_index = getattr(event, u'_store_index', None) + uuid = getattr(event, u'uuid', None) heap_values = ( - event.timestamp, event.timestamp_desc, store_number, store_index, event) + event.timestamp, event.timestamp_desc, store_number, store_index, uuid, + event) heapq.heappush(self._heap, heap_values) def PushEvents(self, events): diff --git a/plaso/serializer/json_serializer.py b/plaso/serializer/json_serializer.py index 4c44917278..96c4a2e6e3 100644 --- a/plaso/serializer/json_serializer.py +++ b/plaso/serializer/json_serializer.py @@ -4,6 +4,7 @@ import binascii import collections import json +import logging from dfvfs.path import path_spec as dfvfs_path_spec from dfvfs.path import factory as dfvfs_path_spec_factory @@ -60,9 +61,6 @@ def _ConvertAttributeContainerToDict(cls, attribute_container): } for attribute_name, attribute_value in attribute_container.GetAttributes(): - if attribute_value is None: - continue - json_dict[attribute_name] = cls._ConvertAttributeValueToDict( attribute_value) @@ -218,19 +216,14 @@ def _ConvertDictToObject(cls, json_dict): container_type)) container_object = container_class() + supported_attribute_names = container_object.GetAttributeNames() for attribute_name, attribute_value in iter(json_dict.items()): - if attribute_name.startswith(u'__'): - continue - - # Event tags should be serialized separately. - # TODO: remove when analysis report no longer defines event tags. - if (container_type == u'analysis_report' and - attribute_name == u'_event_tags'): - continue - - # Be strict about which attributes to set in non event data containers. - if (container_type not in (u'event', u'event_data') and - attribute_name not in container_object.__dict__): + # Be strict about which attributes to set in non events. + if (container_type != u'event' and + attribute_name not in supported_attribute_names): + logging.debug( + u'Unusuppored attribute name: {0:s}.{1:s}'.format( + container_type, attribute_name)) continue if isinstance(attribute_value, dict): @@ -280,7 +273,7 @@ def _ConvertListToObject(cls, json_list): json_list: a list of the JSON serialized objects. Returns: - A deserialized list. + list[object]: a deserialized list. 
""" list_value = [] for json_list_element in json_list: diff --git a/plaso/storage/gzip_file.py b/plaso/storage/gzip_file.py index 928138359e..ffd3307fad 100644 --- a/plaso/storage/gzip_file.py +++ b/plaso/storage/gzip_file.py @@ -11,6 +11,7 @@ from plaso.lib import definitions from plaso.lib import platform_specific from plaso.serializer import json_serializer +from plaso.storage import identifiers from plaso.storage import interface @@ -96,6 +97,10 @@ def _WriteAttributeContainer(self, attribute_container): if self._read_only: raise IOError(u'Unable to write to read-only storage file.') + attribute_container_identifier = identifiers.SerializedStreamIdentifier( + 1, len(self._attribute_containers)) + attribute_container.SetIdentifier(attribute_container_identifier) + attribute_container_data = self._SerializeAttributeContainer( attribute_container) self._gzip_file.write(attribute_container_data) @@ -138,7 +143,19 @@ def AddEventTag(self, event_tag): Args: event_tag (EventTag): event tag. + + Raises: + IOError: if the event tag event identifier type is not supported. """ + event_identifier = event_tag.GetEventIdentifier() + if not isinstance( + event_identifier, identifiers.SerializedStreamIdentifier): + raise IOError(u'Unsupported event identifier type: {0:s}'.format( + type(event_identifier))) + + event_tag.event_stream_number = event_identifier.stream_number + event_tag.event_entry_index = event_identifier.entry_index + self._WriteAttributeContainer(event_tag) def Close(self): @@ -196,10 +213,15 @@ def GetEventSources(self): def GetEventTags(self): """Retrieves the event tags. - Returns: - generator(EventTag): event tag generator. + Yields: + EventTag: event tag. """ - return iter(self._GetAttributeContainerList(u'event_tag')) + for event_tag in iter(self._GetAttributeContainerList(u'event_tag')): + event_identifier = identifiers.SerializedStreamIdentifier( + event_tag.event_stream_number, event_tag.event_entry_index) + event_tag.SetEventIdentifier(event_identifier) + + yield event_tag def HasAnalysisReports(self): """Determines if a storage contains analysis reports. 
@@ -348,6 +370,11 @@ def _AddAttributeContainer(self, attribute_container): self._storage_writer.AddEvent(attribute_container) elif container_type == u'event_tag': + event_identifier = identifiers.SerializedStreamIdentifier( + attribute_container.event_stream_number, + attribute_container.event_entry_index) + attribute_container.SetEventIdentifier(event_identifier) + self._storage_writer.AddEventTag(attribute_container) elif container_type == u'extraction_error': @@ -409,14 +436,17 @@ def MergeAttributeContainers(self, maximum_number_of_containers=0): if not line.endswith(b'\n'): self._data_buffer = b''.join(lines[index:]) continue + attribute_container = self._DeserializeAttributeContainer( line, u'attribute_container') self._AddAttributeContainer(attribute_container) number_of_containers += 1 + if (maximum_number_of_containers > 0 and number_of_containers >= maximum_number_of_containers): self._data_buffer = b''.join(lines[index+1:]) return False + additional_data_buffer = self._gzip_file.read(self._DATA_BUFFER_SIZE) self._data_buffer = b''.join([self._data_buffer, additional_data_buffer]) diff --git a/plaso/storage/identifiers.py b/plaso/storage/identifiers.py new file mode 100644 index 0000000000..e2423b78b2 --- /dev/null +++ b/plaso/storage/identifiers.py @@ -0,0 +1,38 @@ +# -*- coding: utf-8 -*- +"""Storage attribute container identifier objects.""" + +from plaso.containers import interface as containers_interface + + +class SerializedStreamIdentifier( + containers_interface.AttributeContainerIdentifier): + """The serialized stream attribute container identifier. + + The identifier is used to uniquely identify attribute containers. Where + for example an attribute container is stored as a JSON serialized data in + a ZIP file. + + Attributes: + stream_number (int): number of the serialized attribute container stream. + entry_index (int): number of the serialized event within the stream. + """ + + def __init__(self, stream_number, entry_index): + """Initializes a serialized stream attribute container identifier. + + Args: + stream_number (int): number of the serialized attribute container stream. + entry_index (int): number of the serialized event within the stream. + """ + super(SerializedStreamIdentifier, self).__init__() + self.entry_index = entry_index + self.stream_number = stream_number + + def CopyToString(self): + """Copies the identifier to a string representation. + + Returns: + str: unique identifier or None. + """ + if self.stream_number is not None and self.entry_index is not None: + return u'{0:d}.{1:d}'.format(self.stream_number, self.entry_index) diff --git a/plaso/storage/zip_file.py b/plaso/storage/zip_file.py index 9b77dde6d3..26141e771a 100644 --- a/plaso/storage/zip_file.py +++ b/plaso/storage/zip_file.py @@ -22,9 +22,6 @@ event source objects. * event_tag_data.# The event tag data streams contain the serialized event tag objects. -* event_tag_index.# - The event tag index streams contain the stream offset to the serialized - event tag objects. * event_timestamps.# The event timestamps streams contain the timestamp of the serialized events. @@ -85,21 +82,13 @@ | timestamp | timestamp | ... | +-----------+-----------+-...-+ -+ The event tag index stream - -The event tag index streams contain information about the event -the tag applies to. - -An event data stream consists of an array of event tag index values. -+--------+--------+-...-+ -| struct | struct | ... 
| -+--------+--------+-...-+ - -See the _SerializedEventTagIndexTable class for more information about -the actual structure of an event tag index value. - + Version information +Deprecated in version 20170121: +* event_tag_index.# + The event tag index streams contain the stream offset to the serialized + event tag objects. + Deprecated in version 20160715: * information.dump The serialized preprocess objects. @@ -146,6 +135,7 @@ from plaso.lib import definitions from plaso.lib import platform_specific from plaso.serializer import json_serializer +from plaso.storage import identifiers from plaso.storage import interface from plaso.storage import gzip_file @@ -261,15 +251,16 @@ def PopEvent(self): except IndexError: return None, None - def PushEvent(self, event, stream_number, entry_index): + def PushEvent(self, event): """Pushes an event onto the heap. Args: event (EventObject): event. - stream_number (int): serialized data stream number. - entry_index (int): serialized data stream entry index. """ - heap_values = (event.timestamp, stream_number, entry_index, event) + event_identifier = event.GetIdentifier() + heap_values = ( + event.timestamp, event_identifier.stream_number, + event_identifier.entry_index, event) heapq.heappush(self._heap, heap_values) @@ -300,9 +291,10 @@ def PopEvent(self): """Pops an event from the heap. Returns: - A tuple containing an integer containing the event timestamp and - a binary string containing the serialized event data. - If the heap is empty the values in the tuple will be None. + tuple: contains: + + int: event timestamp or None if the heap is empty + bytes: serialized event data or None if the heap is empty """ try: timestamp, event_data = heapq.heappop(self._heap) @@ -326,93 +318,6 @@ def PushEvent(self, timestamp, event_data): self.data_size += len(event_data) -class _EventTagIndexValue(object): - """Class that defines the event tag index value. - - Attributes: - event_uuid (str): event identifier formatted as an UUID. - offset (int): serialized event tag data offset. - store_number (int): serialized data stream number. - store_index (int): serialized data stream entry index. - tag_type (int): tag type. - """ - TAG_TYPE_UNDEFINED = 0 - TAG_TYPE_NUMERIC = 1 - TAG_TYPE_UUID = 2 - - def __init__( - self, tag_type, offset, event_uuid=None, store_number=None, - store_index=None): - """Initializes the tag index value. - - Args: - tag_type (int): tag type. - offset (int): serialized event tag data offset. - event_uuid (Optional[str]): event identifier formatted as an UUID. - store_number (Optional[int]): serialized data stream number. - store_index (Optional[int]): serialized data stream entry index. - """ - super(_EventTagIndexValue, self).__init__() - self._identifier = None - self.event_uuid = event_uuid - self.offset = offset - self.store_number = store_number - self.store_index = store_index - self.tag_type = tag_type - - def __getitem__(self, key): - """Retrieves a specific instance attribute. - - This function is needed to support construct._build() as used - as of version 2.5.3. - - Args: - key (str): attribute name. - - Returns: - object: attribute value. - - Raises: - KeyError: if the instance does not have the attribute. 
- """ - if not hasattr(self, key): - raise KeyError(u'No such attribute: {0:s}'.format(key)) - - return getattr(self, key) - - def __str__(self): - """str: string representation of the event tag identifier.""" - string = u'tag_type: {0:d} offset: 0x{1:08x}'.format( - self.tag_type, self.offset) - - if self.tag_type == self.TAG_TYPE_NUMERIC: - return u'{0:s} store_number: {1:d} store_index: {2:d}'.format( - string, self.store_number, self.store_index) - - elif self.tag_type == self.TAG_TYPE_UUID: - return u'{0:s} event_uuid: {1:s}'.format(string, self.event_uuid) - - return string - - @property - def identifier(self): - """str: event identifier.""" - if not self._identifier: - if self.tag_type == self.TAG_TYPE_NUMERIC: - self._identifier = u'{0:d}:{1:d}'.format( - self.store_number, self.store_index) - - elif self.tag_type == self.TAG_TYPE_UUID: - self._identifier = self.event_uuid - - return self._identifier - - @property - def tag(self): - """The tag property to support construct.build().""" - return self - - class _SerializedDataStream(object): """Class that defines a serialized data stream.""" @@ -800,140 +705,6 @@ def Write(self): self._zip_file.writestr(self._stream_name, table_data) -class _SerializedEventTagIndexTable(object): - """Class that defines a serialized event tag index table.""" - - _TAG_STORE_STRUCT = construct.Struct( - u'tag_store', - construct.ULInt32(u'store_number'), - construct.ULInt32(u'store_index')) - - _TAG_UUID_STRUCT = construct.Struct( - u'tag_uuid', - construct.PascalString(u'event_uuid')) - - _TAG_INDEX_STRUCT = construct.Struct( - u'tag_index', - construct.Byte(u'tag_type'), - construct.ULInt32(u'offset'), - construct.IfThenElse( - u'tag', - lambda ctx: ctx[u'tag_type'] == 1, - _TAG_STORE_STRUCT, - _TAG_UUID_STRUCT)) - - def __init__(self, zip_file, stream_name): - """Initializes a serialized event tag index table. - - Args: - zip_file (zipfile.ZipFile): ZIP file that contains the stream. - stream_name (str): name of the stream. - """ - super(_SerializedEventTagIndexTable, self).__init__() - self._event_tag_indexes = [] - self._stream_name = stream_name - self._zip_file = zip_file - - @property - def number_of_entries(self): - """int: number of event tag index entries.""" - return len(self._event_tag_indexes) - - def AddEventTagIndex( - self, tag_type, offset, event_uuid=None, store_number=None, - store_index=None): - """Adds an event tag index. - - Args: - tag_type (int): event tag type. - offset (int): serialized event tag data offset. - event_uuid (Optional[str]): event identifier formatted as an UUID. - store_number (Optional[str]): store number. - store_index (Optional[str]): index relative to the start of the store. - """ - event_tag_index = _EventTagIndexValue( - tag_type, offset, event_uuid=event_uuid, store_number=store_number, - store_index=store_index) - self._event_tag_indexes.append(event_tag_index) - - def GetEventTagIndex(self, entry_index): - """Retrieves a specific event tag index. - - Args: - entry_index (int): table entry index. - - Returns: - _EventTagIndexValue: event tag index value. - - Raises: - IndexError: if the table entry index is out of bounds. - """ - return self._event_tag_indexes[entry_index] - - def Read(self): - """Reads the serialized event tag index table. - - Raises: - IOError: if the event tag index table cannot be read. 
- """ - try: - _, _, stream_store_number = self._stream_name.rpartition(u'.') - stream_store_number = int(stream_store_number, 10) - except ValueError as exception: - raise IOError(( - u'Unable to determine store number of stream: {0:s} ' - u'with error: {1:s}').format(self._stream_name, exception)) - - try: - file_object = self._zip_file.open(self._stream_name, mode='r') - except KeyError as exception: - raise IOError( - u'Unable to open stream with error: {0:s}'.format(exception)) - - try: - while True: - try: - tag_index_struct = self._TAG_INDEX_STRUCT.parse_stream(file_object) - except (construct.FieldError, AttributeError): - break - - tag_type = tag_index_struct.get( - u'tag_type', _EventTagIndexValue.TAG_TYPE_UNDEFINED) - if tag_type not in ( - _EventTagIndexValue.TAG_TYPE_NUMERIC, - _EventTagIndexValue.TAG_TYPE_UUID): - logging.warning(u'Unsupported tag type: {0:d}'.format(tag_type)) - break - - offset = tag_index_struct.get(u'offset', None) - tag_index = tag_index_struct.get(u'tag', {}) - event_uuid = tag_index.get(u'event_uuid', None) - store_number = tag_index.get(u'store_number', stream_store_number) - store_index = tag_index.get(u'store_index', None) - - event_tag_index = _EventTagIndexValue( - tag_type, offset, event_uuid=event_uuid, store_number=store_number, - store_index=store_index) - self._event_tag_indexes.append(event_tag_index) - - finally: - file_object.close() - - def Write(self): - """Writes the event tag index table. - - Raises: - IOError: if the event tag index table cannot be written. - """ - serialized_entries = [] - for event_tag_index in self._event_tag_indexes: - entry_data = self._TAG_INDEX_STRUCT.build(event_tag_index) - serialized_entries.append(entry_data) - - table_data = b''.join(serialized_entries) - self._zip_file.writestr(self._stream_name, table_data) - - class _StorageMetadata(object): """Class that implements storage metadata. @@ -1015,12 +786,14 @@ class ZIPStorageFile(interface.BaseFileStorage): storage_type (str): storage type. """ + NEXT_AVAILABLE_ENTRY = -1 + # The format version. - _FORMAT_VERSION = 20160715 + _FORMAT_VERSION = 20170121 # The earliest format version, stored in-file, that this class # is able to read. - _COMPATIBLE_FORMAT_VERSION = 20160715 + _COMPATIBLE_FORMAT_VERSION = 20170121 # The maximum buffer size of serialized data before triggering # a flush to disk (64 MiB). @@ -1071,7 +844,11 @@ def __init__( self._event_sources_in_stream = [] self._event_sources_list = _AttributeContainersList() self._event_tag_index = None + self._event_tag_offset_tables = {} + self._event_tag_offset_tables_lfu = [] + self._event_tag_streams = {} self._event_tag_stream_number = 1 + self._event_tags_list = _AttributeContainersList() self._event_timestamp_tables = {} self._event_timestamp_tables_lfu = [] self._event_heap = None @@ -1090,25 +867,17 @@ def __init__( self.serialization_format = definitions.SERIALIZER_FORMAT_JSON self.storage_type = storage_type - def _BuildTagIndex(self): - """Builds the tag index that contains the offsets for each tag. + def _BuildEventTagIndex(self): + """Builds the event tag index. Raises: - IOError: if the stream cannot be opened. + IOError: if a stream is missing. 
""" self._event_tag_index = {} - - for stream_name in self._GetStreamNames(): - if not stream_name.startswith(u'event_tag_index.'): - continue - - event_tag_index_table = _SerializedEventTagIndexTable( - self._zipfile, stream_name) - event_tag_index_table.Read() - - for entry_index in range(event_tag_index_table.number_of_entries): - tag_index_value = event_tag_index_table.GetEventTagIndex(entry_index) - self._event_tag_index[tag_index_value.identifier] = tag_index_value + for event_tag in self.GetEventTags(): + event_identifier = event_tag.GetEventIdentifier() + lookup_key = event_identifier.CopyToString() + self._event_tag_index[lookup_key] = event_tag.GetIdentifier() def _FillEventHeapFromStream(self, stream_number): """Fills the event heap with the next events from the stream. @@ -1127,10 +896,7 @@ def _FillEventHeapFromStream(self, stream_number): if not event: return - # TODO: refactor. - store_index = getattr(event, u'_store_index', None) - - self._event_heap.PushEvent(event, stream_number, store_index) + self._event_heap.PushEvent(event) reference_timestamp = event.timestamp while event.timestamp == reference_timestamp: @@ -1138,18 +904,16 @@ def _FillEventHeapFromStream(self, stream_number): if not event: break - # TODO: refactor. - store_index = getattr(event, u'_store_index', None) - - self._event_heap.PushEvent(event, stream_number, store_index) + self._event_heap.PushEvent(event) - def _GetEvent(self, stream_number, entry_index=-1): + def _GetEvent(self, stream_number, entry_index=NEXT_AVAILABLE_ENTRY): """Reads an event from a specific stream. Args: - stream_number (int): number of the serialized event object stream. + stream_number (int): number of the serialized event stream. entry_index (Optional[int]): number of the serialized event within - the stream, where -1 represents the next available event. + the stream, where NEXT_AVAILABLE_ENTRY represents the next available + event. Returns: EventObject: event or None. @@ -1160,34 +924,48 @@ def _GetEvent(self, stream_number, entry_index=-1): return event = self._DeserializeAttributeContainer(event_data, u'event') - - # TODO: refactor. - setattr(event, u'_store_number', stream_number) - setattr(event, u'_store_index', entry_index) - + if event: + event_identifier = identifiers.SerializedStreamIdentifier( + stream_number, entry_index) + event.SetIdentifier(event_identifier) return event - def _GetEventSerializedData(self, stream_number, entry_index=-1): + def _GetEventSerializedData( + self, stream_number, entry_index=NEXT_AVAILABLE_ENTRY): """Retrieves specific event serialized data. By default the first available entry in the specific serialized stream is read, however any entry can be read using the index stream. Args: - stream_number (int): number of the serialized event object stream. + stream_number (int): number of the serialized event stream. entry_index (Optional[int]): number of the serialized event within - the stream, where -1 represents the next available event. + the stream, where NEXT_AVAILABLE_ENTRY represents the next available + event. Returns: - A tuple containing the event serialized data and the entry index - of the event within the storage file. + tuple: contains: + + bytes: event serialized data. + int: entry index of the event within the stream. Raises: IOError: if the stream cannot be opened. - ValueError: if the entry index is out of bounds. + ValueError: if the stream number or entry index is out of bounds. 
""" - if entry_index < -1: - raise ValueError(u'Entry index out of bounds.') + if stream_number is None: + raise ValueError(u'Invalid stream number.') + + if entry_index is None: + raise ValueError(u'Invalid entry index.') + + if stream_number < 1 or stream_number > self._event_stream_number: + raise ValueError(u'Stream number: {0:d} out of bounds.'.format( + stream_number)) + + if entry_index < self.NEXT_AVAILABLE_ENTRY: + raise ValueError(u'Entry index: {0:d} out of bounds.'.format( + entry_index)) try: data_stream = self._GetSerializedEventStream(stream_number) @@ -1220,13 +998,14 @@ def _GetEventSerializedData(self, stream_number, entry_index=-1): return event_data, event_entry_index - def _GetEventSource(self, stream_number, entry_index=-1): + def _GetEventSource(self, stream_number, entry_index=NEXT_AVAILABLE_ENTRY): """Reads an event source from a specific stream. Args: - stream_number (int): number of the serialized event source object stream. + stream_number (int): number of the serialized event source stream. entry_index (Optional[int]): number of the serialized event source - within the stream, where -1 represents the next available event + within the stream, where NEXT_AVAILABLE_ENTRY represents the next + available event source. Returns: @@ -1237,32 +1016,51 @@ def _GetEventSource(self, stream_number, entry_index=-1): if not event_source_data: return - return self._DeserializeAttributeContainer( + event_source = self._DeserializeAttributeContainer( event_source_data, u'event_source') + if event_source: + event_source_identifier = identifiers.SerializedStreamIdentifier( + stream_number, entry_index) + event_source.SetIdentifier(event_source_identifier) + return event_source - def _GetEventSourceSerializedData(self, stream_number, entry_index=-1): + def _GetEventSourceSerializedData( + self, stream_number, entry_index=NEXT_AVAILABLE_ENTRY): """Retrieves specific event source serialized data. By default the first available entry in the specific serialized stream is read, however any entry can be read using the index stream. Args: - stream_number (int): number of the serialized event source object stream. + stream_number (int): number of the serialized event source stream. entry_index (Optional[int]): number of the serialized event source - within the stream, where -1 represents the next available event - source. + within the stream, where NEXT_AVAILABLE_ENTRY represents the next + available event source. Returns: - A tuple containing the event source serialized data and the entry index - of the event source within the storage file. + tuple: contains: + + bytes: event source serialized data. + int: entry index of the event source within the stream. Raises: IOError: if the stream cannot be opened. - ValueError: if the entry index is out of bounds. + ValueError: if the stream number or entry index is out of bounds. 
""" - if entry_index < -1: + if stream_number < 1 or stream_number > self._event_source_stream_number: + raise ValueError(u'Stream number out of bounds.') + + if entry_index < self.NEXT_AVAILABLE_ENTRY: raise ValueError(u'Entry index out of bounds.') + if stream_number == self._event_source_stream_number: + if entry_index < 0: + raise ValueError(u'Entry index out of bounds.') + + event_source_data = self._event_sources_list.GetAttributeContainerByIndex( + entry_index) + return event_source_data, entry_index + try: data_stream = self._GetSerializedEventSourceStream(stream_number) except IOError as exception: @@ -1294,29 +1092,128 @@ def _GetEventSourceSerializedData(self, stream_number, entry_index=-1): return event_source_data, event_source_entry_index - def _GetEventTagIndexValue(self, store_number, entry_index, uuid): - """Retrieves an event tag index value. + def _GetEventTag(self, stream_number, entry_index=NEXT_AVAILABLE_ENTRY): + """Reads an event tag from a specific stream. Args: - store_number (int): store number. - entry_index (int): serialized data stream entry index. - uuid (str): event identifier formatted as an UUID. + stream_number (int): number of the serialized event tag stream. + entry_index (Optional[int]): number of the serialized event tag + within the stream, where NEXT_AVAILABLE_ENTRY represents + the next available event tag. Returns: - An event tag index value (instance of _EventTagIndexValue). + EventTag: event tag or None. """ - if self._event_tag_index is None: - self._BuildTagIndex() + event_tag_data, entry_index = self._GetEventTagSerializedData( + stream_number, entry_index=entry_index) + if not event_tag_data: + return + + event_tag = self._DeserializeAttributeContainer( + event_tag_data, u'event_tag') + if event_tag: + event_tag_identifier = identifiers.SerializedStreamIdentifier( + stream_number, entry_index) + event_tag.SetIdentifier(event_tag_identifier) + + event_identifier = identifiers.SerializedStreamIdentifier( + event_tag.event_stream_number, event_tag.event_entry_index) + event_tag.SetEventIdentifier(event_identifier) - # Try looking up event tag by numeric identifier. - tag_identifier = u'{0:d}:{1:d}'.format(store_number, entry_index) - tag_index_value = self._event_tag_index.get(tag_identifier, None) + return event_tag - # Try looking up event tag by UUID. - if tag_index_value is None: - tag_index_value = self._event_tag_index.get(uuid, None) + def _GetEventTagByIdentifier(self, event_identifier): + """Retrieves an event tag by the event identifier. - return tag_index_value + Args: + event_identifier (AttributeContainerIdentifier): event attribute + container identifier. + + Returns: + EventTag: event tag or None. + + Raises: + IOError: if the event tag data stream cannot be opened. + """ + if not self._event_tag_index: + self._BuildEventTagIndex() + + lookup_key = event_identifier.CopyToString() + event_tag_identifier = self._event_tag_index.get(lookup_key, None) + if not event_tag_identifier: + return + + return self._GetEventTag( + event_tag_identifier.stream_number, + entry_index=event_tag_identifier.entry_index) + + def _GetEventTagSerializedData( + self, stream_number, entry_index=NEXT_AVAILABLE_ENTRY): + """Retrieves specific event tag serialized data. + + By default the first available entry in the specific serialized stream + is read, however any entry can be read using the index stream. + + Args: + stream_number (int): number of the serialized event tag stream. 
+ entry_index (Optional[int]): number of the serialized event tag + within the stream, where NEXT_AVAILABLE_ENTRY represents + the next available event tag. + + Returns: + tuple: contains: + + bytes: event tag serialized data. + int: entry index of the event tag within the stream. + + Raises: + IOError: if the stream cannot be opened. + ValueError: if the stream number or entry index is out of bounds. + """ + if stream_number < 1 or stream_number > self._event_tag_stream_number: + raise ValueError(u'Stream number out of bounds.') + + if entry_index < self.NEXT_AVAILABLE_ENTRY: + raise ValueError(u'Entry index out of bounds.') + + if stream_number == self._event_tag_stream_number: + if entry_index < 0: + raise ValueError(u'Entry index out of bounds.') + + event_tag_data = self._event_tags_list.GetAttributeContainerByIndex( + entry_index) + return event_tag_data, entry_index + + try: + data_stream = self._GetSerializedEventTagStream(stream_number) + except IOError as exception: + logging.error(( + u'Unable to retrieve serialized data steam: {0:d} ' + u'with error: {1:s}.').format(stream_number, exception)) + return None, None + + if entry_index >= 0: + try: + offset_table = self._GetSerializedEventTagOffsetTable(stream_number) + stream_offset = offset_table.GetOffset(entry_index) + except (IOError, IndexError): + logging.error(( + u'Unable to read entry index: {0:d} from serialized data stream: ' + u'{1:d}').format(entry_index, stream_number)) + return None, None + + data_stream.SeekEntryAtOffset(entry_index, stream_offset) + + event_tag_entry_index = data_stream.entry_index + try: + event_tag_data = data_stream.ReadEntry() + except IOError as exception: + logging.error(( + u'Unable to read entry from serialized data steam: {0:d} ' + u'with error: {1:s}.').format(stream_number, exception)) + return None, None + + return event_tag_data, event_tag_entry_index def _GetLastStreamNumber(self, stream_name_prefix): """Retrieves the last stream number. @@ -1487,6 +1384,21 @@ def _GetSerializedEventSourceStream(self, stream_number): return self._GetSerializedDataStream( self._event_source_streams, u'event_source_data', stream_number) + def _GetSerializedEventTagStream(self, stream_number): + """Retrieves the serialized event tag stream. + + Args: + stream_number (int): number of the stream. + + Returns: + _SerializedDataStream: serialized data stream. + + Raises: + IOError: if the stream cannot be opened. + """ + return self._GetSerializedDataStream( + self._event_tag_streams, u'event_tag_data', stream_number) + def _GetSerializedEventStream(self, stream_number): """Retrieves the serialized event stream. @@ -1518,6 +1430,22 @@ def _GetSerializedEventStreamNumbers(self): """ return self._GetSerializedDataStreamNumbers(u'event_data.') + def _GetSerializedEventTagOffsetTable(self, stream_number): + """Retrieves the serialized event tag stream offset table. + + Args: + stream_number (int): number of the stream. + + Returns: + _SerializedDataOffsetTable: serialized data offset table. + + Raises: + IOError: if the stream cannot be opened. + """ + return self._GetSerializedDataOffsetTable( + self._event_tag_offset_tables, self._event_tag_offset_tables_lfu, + u'event_tag_index', stream_number) + def _GetSerializedEventTimestampTable(self, stream_number): """Retrieves the serialized event stream timestamp table. @@ -1592,12 +1520,8 @@ def _GetSortedEvent(self, time_range=None): next_event.timestamp != event.timestamp): self._FillEventHeapFromStream(stream_number) - # TODO: refactor. 
- store_number = getattr(event, u'_store_number', None) - store_index = getattr(event, u'_store_index', None) - event.tag = self._ReadEventTagByIdentifier( - store_number, store_index, event.uuid) - + event_identifier = event.GetIdentifier() + event.tag = self._GetEventTagByIdentifier(event_identifier) return event def _HasStream(self, stream_name): @@ -1631,7 +1555,7 @@ def _InitializeMergeBuffer(self, time_range=None): number_range = self._GetSerializedEventStreamNumbers() for stream_number in number_range: - entry_index = -1 + entry_index = self.NEXT_AVAILABLE_ENTRY if time_range: stream_name = u'event_timestamps.{0:06d}'.format(stream_number) if self._HasStream(stream_name): @@ -1645,7 +1569,8 @@ def _InitializeMergeBuffer(self, time_range=None): # If the start timestamp of the time range filter is larger than the # last timestamp in the timestamp table skip this stream. - timestamp_compare = timestamp_table.GetTimestamp(-1) + timestamp_compare = timestamp_table.GetTimestamp( + self.NEXT_AVAILABLE_ENTRY) if time_range.start_timestamp > timestamp_compare: continue @@ -1665,10 +1590,7 @@ def _InitializeMergeBuffer(self, time_range=None): if time_range and event.timestamp > time_range.end_timestamp: continue - # TODO: refactor. - store_index = getattr(event, u'_store_index', None) - - self._event_heap.PushEvent(event, stream_number, store_index) + self._event_heap.PushEvent(event) reference_timestamp = event.timestamp while event.timestamp == reference_timestamp: @@ -1676,10 +1598,7 @@ def _InitializeMergeBuffer(self, time_range=None): if not event: break - # TODO: refactor. - store_index = getattr(event, u'_store_index', None) - - self._event_heap.PushEvent(event, stream_number, store_index) + self._event_heap.PushEvent(event) def _OpenRead(self): """Opens the storage file for reading.""" @@ -1700,6 +1619,7 @@ def _OpenRead(self): self._serializer = json_serializer.JSONAttributeContainerSerializer + # TODO: create a single function to determin last stream numbers. self._error_stream_number = self._GetLastStreamNumber(u'error_data.') self._event_stream_number = self._GetLastStreamNumber(u'event_data.') self._event_source_stream_number = self._GetLastStreamNumber( @@ -1758,7 +1678,7 @@ def _OpenZIPFile(self, path, read_only): Raises: IOError: if the ZIP file is already opened or if the ZIP file cannot - be opened. + be opened. """ if self._zipfile: raise IOError(u'ZIP file already opened.') @@ -1837,35 +1757,6 @@ def _ReadAttributeContainersFromStream(self, data_stream, container_type): attribute_container = self._ReadAttributeContainerFromStreamEntry( data_stream, container_type) - def _ReadEventTagByIdentifier(self, store_number, entry_index, uuid): - """Reads an event tag by identifier. - - Args: - store_number (int): store number. - entry_index (int): serialized data stream entry index. - uuid (str): event identifier formatted as an UUID. - - Returns: - EventTag: event tag or None. - - Raises: - IOError: if the event tag data stream cannot be opened. 
- """ - tag_index_value = self._GetEventTagIndexValue( - store_number, entry_index, uuid) - if tag_index_value is None: - return - - stream_name = u'event_tag_data.{0:06d}'.format(tag_index_value.store_number) - if not self._HasStream(stream_name): - raise IOError(u'No such stream: {0:s}'.format(stream_name)) - - data_stream = _SerializedDataStream( - self._zipfile, self._zipfile_path, stream_name) - data_stream.SeekEntryAtOffset(entry_index, tag_index_value.offset) - - return self._ReadAttributeContainerFromStreamEntry(data_stream, u'event') - def _ReadSerializerStream(self): """Reads the serializer stream. @@ -2084,56 +1975,14 @@ def _WriteSerializedEventSources(self): def _WriteSerializedEventTags(self): """Writes the serialized event tags.""" - if not self._serialized_event_tags_size: + if not self._event_tags_list.data_size: return - stream_name = u'event_tag_index.{0:06d}'.format( - self._event_tag_stream_number) - event_tag_index_table = _SerializedEventTagIndexTable( - self._zipfile, stream_name) - - if self._serializers_profiler: - self._serializers_profiler.StartTiming(u'write') - - stream_name = u'event_tag_data.{0:06d}'.format( - self._event_tag_stream_number) - data_stream = _SerializedDataStream( - self._zipfile, self._zipfile_path, stream_name) - entry_data_offset = data_stream.WriteInitialize() - - try: - for _ in range(len(self._serialized_event_tags)): - heap_values = heapq.heappop(self._serialized_event_tags) - store_number, store_index, event_uuid, entry_data = heap_values - - if event_uuid: - tag_type = _EventTagIndexValue.TAG_TYPE_UUID - else: - tag_type = _EventTagIndexValue.TAG_TYPE_NUMERIC - - event_tag_index_table.AddEventTagIndex( - tag_type, entry_data_offset, event_uuid=event_uuid, - store_number=store_number, store_index=store_index) - - entry_data_offset = data_stream.WriteEntry(entry_data) - - except: - data_stream.WriteAbort() - - if self._serializers_profiler: - self._serializers_profiler.StopTiming(u'write') - - raise - - event_tag_index_table.Write() - data_stream.WriteFinalize() - - if self._serializers_profiler: - self._serializers_profiler.StopTiming(u'write') + self._WriteAttributeContainersList( + self._event_tags_list, u'event_tag', self._event_tag_stream_number) self._event_tag_stream_number += 1 - self._serialized_event_tags_size = 0 - self._serialized_event_tags = [] + self._event_tags_list.Empty() def _WriteSessionCompletion(self, session_completion): """Writes a session completion attribute container. @@ -2144,7 +1993,7 @@ def _WriteSessionCompletion(self, session_completion): Raises: IOError: if the storage type does not support writing a session - completion or the session completion already exists. + completion or the session completion already exists. """ if self.storage_type != definitions.STORAGE_TYPE_SESSION: raise IOError(u'Session completion not supported by storage type.') @@ -2171,7 +2020,7 @@ def _WriteSessionStart(self, session_start): Raises: IOError: if the storage type does not support writing a session - start or the session start already exists. + start or the session start already exists. """ if self.storage_type != definitions.STORAGE_TYPE_SESSION: raise IOError(u'Session completion not supported by storage type.') @@ -2228,7 +2077,7 @@ def _WriteTaskCompletion(self, task_completion): Raises: IOError: if the storage type does not support writing a task - completion or the task completion already exists. + completion or the task completion already exists. 
""" if self.storage_type != definitions.STORAGE_TYPE_TASK: raise IOError(u'Task completion not supported by storage type.') @@ -2254,7 +2103,7 @@ def _WriteTaskStart(self, task_start): Raises: IOError: if the storage type does not support writing a task start - or the task start already exists. + or the task start already exists. """ if self.storage_type != definitions.STORAGE_TYPE_TASK: raise IOError(u'Task start not supported by storage type.') @@ -2287,6 +2136,10 @@ def AddAnalysisReport(self, analysis_report): if self._read_only: raise IOError(u'Unable to write to read-only storage file.') + analysis_report_identifier = identifiers.SerializedStreamIdentifier( + self._analysis_report_stream_number, 0) + analysis_report.SetIdentifier(analysis_report_identifier) + stream_name = u'analysis_report_data.{0:06}'.format( self._analysis_report_stream_number) @@ -2308,9 +2161,18 @@ def AddError(self, error): Raises: IOError: when the storage file is closed or read-only or - if the error cannot be serialized. + if the error cannot be serialized. """ - error.storage_session = self._last_session + if not self._is_open: + raise IOError(u'Unable to write to closed storage file.') + + if self._read_only: + raise IOError(u'Unable to write to read-only storage file.') + + error_identifier = identifiers.SerializedStreamIdentifier( + self._error_stream_number, + self._errors_list.number_of_attribute_containers) + error.SetIdentifier(error_identifier) # We try to serialize the error first, so we can skip some # processing if it is invalid. @@ -2329,7 +2191,7 @@ def AddEvent(self, event): Raises: IOError: when the storage file is closed or read-only or - if the event cannot be serialized. + if the event cannot be serialized. """ if not self._is_open: raise IOError(u'Unable to write to closed storage file.') @@ -2337,6 +2199,11 @@ def AddEvent(self, event): if self._read_only: raise IOError(u'Unable to write to read-only storage file.') + event_identifier = identifiers.SerializedStreamIdentifier( + self._event_stream_number, + self._serialized_events_heap.number_of_events) + event.SetIdentifier(event_identifier) + # We try to serialize the event first, so we can skip some # processing if it is invalid. event_data = self._SerializeAttributeContainer(event) @@ -2354,7 +2221,7 @@ def AddEventSource(self, event_source): Raises: IOError: when the storage file is closed or read-only or - if the event source cannot be serialized. + if the event source cannot be serialized. """ if not self._is_open: raise IOError(u'Unable to write to closed storage file.') @@ -2362,7 +2229,10 @@ def AddEventSource(self, event_source): if self._read_only: raise IOError(u'Unable to write to read-only storage file.') - event_source.storage_session = self._last_session + event_source_identifier = identifiers.SerializedStreamIdentifier( + self._event_source_stream_number, + self._event_sources_list.number_of_attribute_containers) + event_source.SetIdentifier(event_source_identifier) # We try to serialize the event source first, so we can skip some # processing if it is invalid. @@ -2376,12 +2246,16 @@ def AddEventSource(self, event_source): def AddEventTag(self, event_tag): """Adds an event tag. + If the event referenced by the tag is already tagged, the comment + and labels will be appended to the existing tag. + Args: event_tag (EventTag): event tag. Raises: IOError: when the storage file is closed or read-only or - if the event tag cannot be serialized. 
+          if the event tag cannot be serialized or
+          if the event tag event identifier type is not supported.
     """
     if not self._is_open:
       raise IOError(u'Unable to write to closed storage file.')
@@ -2389,20 +2263,40 @@ def AddEventTag(self, event_tag):
     if self._read_only:
       raise IOError(u'Unable to write to read-only storage file.')
 
+    event_identifier = event_tag.GetEventIdentifier()
+    if not isinstance(
+        event_identifier, identifiers.SerializedStreamIdentifier):
+      raise IOError(u'Unsupported event identifier type: {0:s}'.format(
+          type(event_identifier)))
+
+    event_tag_identifier = identifiers.SerializedStreamIdentifier(
+        self._event_tag_stream_number,
+        self._event_tags_list.number_of_attribute_containers)
+    event_tag.SetIdentifier(event_tag_identifier)
+
+    # Check if the event has already been tagged on a previous occasion;
+    # if so, we need to append the event tag to the existing event tag.
+    stored_event_tag = self._GetEventTagByIdentifier(event_identifier)
+
+    if stored_event_tag:
+      event_tag.AddComment(stored_event_tag.comment)
+      event_tag.AddLabels(stored_event_tag.labels)
+
+    event_tag.event_stream_number = event_identifier.stream_number
+    event_tag.event_entry_index = event_identifier.entry_index
+
+    # TODO: update event tag index value.
+    lookup_key = event_identifier.CopyToString()
+    self._event_tag_index[lookup_key] = event_tag_identifier
+
     # We try to serialize the event tag first, so we can skip some
     # processing if it is invalid.
     event_tag_data = self._SerializeAttributeContainer(event_tag)
 
-    event_uuid = getattr(event_tag, u'event_uuid', None)
-    store_index = getattr(event_tag, u'store_index', None)
-    store_number = getattr(event_tag, u'store_number', None)
+    self._event_tags_list.PushAttributeContainer(event_tag_data)
 
-    heap_values = (store_number, store_index, event_uuid, event_tag_data)
-    heapq.heappush(self._serialized_event_tags, heap_values)
-    self._serialized_event_tags_size += len(event_tag_data)
-
-    if self._serialized_event_tags_size > self._maximum_buffer_size:
-      self._WriteSerializedEventSources()
+    if self._event_tags_list.data_size > self._maximum_buffer_size:
+      self._WriteSerializedEventTags()
 
   def AddEventTags(self, event_tags):
     """Adds event tags.
@@ -2412,7 +2306,7 @@ def AddEventTags(self, event_tags):
 
     Raises:
       IOError: when the storage file is closed or read-only or
-          if the stream cannot be opened.
+          if the stream cannot be opened.
     """
     if not self._is_open:
       raise IOError(u'Unable to write to closed storage file.')
@@ -2420,49 +2314,9 @@ def AddEventTags(self, event_tags):
     if self._read_only:
       raise IOError(u'Unable to write to read-only storage file.')
 
-    if self._event_tag_index is None:
-      self._BuildTagIndex()
-
     for event_tag in event_tags:
-      tag_index_value = self._event_tag_index.get(event_tag.string_key, None)
-
-      # This particular event has already been tagged on a previous occasion,
-      # we need to make sure we are appending to that particular event tag.
-      if tag_index_value is not None:
-        stream_name = u'event_tag_data.{0:06d}'.format(
-            tag_index_value.store_number)
-
-        if not self._HasStream(stream_name):
-          raise IOError(u'No such stream: {0:s}'.format(stream_name))
-
-        data_stream = _SerializedDataStream(
-            self._zipfile, self._zipfile_path, stream_name)
-        # TODO: replace 0 by the actual event tag entry index.
-        # This is for code consistency rather then a functional purpose.
-        data_stream.SeekEntryAtOffset(0, tag_index_value.offset)
-
-        # TODO: if stored_event_tag is cached make sure to update cache
-        # after write.
- stored_event_tag = self._ReadAttributeContainerFromStreamEntry( - data_stream, u'event_tag') - if not stored_event_tag: - continue - - event_tag.AddComment(stored_event_tag.comment) - event_tag.AddLabels(stored_event_tag.labels) - self.AddEventTag(event_tag) - self._WriteSerializedEventTags() - - # TODO: Update the tags that have changed in the index instead - # of flushing the index. - - # If we already built a list of tag in memory we need to clear that - # since the tags have changed. - if self._event_tag_index is not None: - self._event_tag_index = None - def Close(self): """Closes the storage file. @@ -2470,8 +2324,8 @@ def Close(self): Raises: IOError: if the storage file is already closed, - if the event source cannot be serialized or - if the storage file cannot be closed. + if the event source cannot be serialized or + if the storage file cannot be closed. """ if not self._is_open: raise IOError(u'Storage file already closed.') @@ -2493,6 +2347,10 @@ def Close(self): self._event_source_offset_tables_lfu = [] self._event_source_streams = {} + self._event_tag_offset_tables = [] + self._event_tag_offset_tables_lfu = [] + self._event_tag_streams = {} + self._event_timestamp_tables = {} self._event_timestamp_tables_lfu = [] @@ -2529,7 +2387,7 @@ def Flush(self): Raises: IOError: when trying to write to a closed storage file or - if the event source cannot be serialized. + if the event source cannot be serialized. """ if not self._is_open: raise IOError(u'Unable to flush a closed storage file.') @@ -2558,6 +2416,7 @@ def GetAnalysisReports(self): for analysis_report in self._ReadAttributeContainersFromStream( data_stream, u'analysis_report'): + # TODO: add SetIdentifier. yield analysis_report def GetErrors(self): @@ -2577,8 +2436,11 @@ def GetErrors(self): data_stream = _SerializedDataStream( self._zipfile, self._zipfile_path, stream_name) - for error in self._ReadAttributeContainersFromStream( - data_stream, u'error'): + generator = self._ReadAttributeContainersFromStream(data_stream, u'error') + for entry_index, error in enumerate(generator): + error_identifier = identifiers.SerializedStreamIdentifier( + stream_number, entry_index) + error.SetIdentifier(error_identifier) yield error def GetEvents(self, time_range=None): @@ -2637,11 +2499,22 @@ def GetEventSourceByIndex(self, index): self._zipfile, self._zipfile_path, stream_name) data_stream.SeekEntryAtOffset(index, stream_offset) - return self._ReadAttributeContainerFromStreamEntry( + event_source = self._ReadAttributeContainerFromStreamEntry( data_stream, u'event_source') + if event_source: + event_source_identifier = identifiers.SerializedStreamIdentifier( + stream_number, index) + event_source.SetIdentifier(event_source_identifier) + return event_source entry_data = self._event_sources_list.GetAttributeContainerByIndex(index) - return self._DeserializeAttributeContainer(entry_data, u'event_source') + event_source = self._DeserializeAttributeContainer( + entry_data, u'event_source') + if event_source: + event_source_identifier = identifiers.SerializedStreamIdentifier( + stream_number, index) + event_source.SetIdentifier(event_source_identifier) + return event_source def GetEventSources(self): """Retrieves the event sources. 
@@ -2660,8 +2533,12 @@ def GetEventSources(self):
       data_stream = _SerializedDataStream(
           self._zipfile, self._zipfile_path, stream_name)
 
-      for event_source in self._ReadAttributeContainersFromStream(
-          data_stream, u'event_source'):
+      generator = self._ReadAttributeContainersFromStream(
+          data_stream, u'event_source')
+      for entry_index, event_source in enumerate(generator):
+        event_source_identifier = identifiers.SerializedStreamIdentifier(
+            stream_number, entry_index)
+        event_source.SetIdentifier(event_source_identifier)
         yield event_source
 
   def GetEventTags(self):
@@ -2681,8 +2558,17 @@
       data_stream = _SerializedDataStream(
           self._zipfile, self._zipfile_path, stream_name)
 
-      for event_tag in self._ReadAttributeContainersFromStream(
-          data_stream, u'event_tag'):
+      generator = self._ReadAttributeContainersFromStream(
+          data_stream, u'event_tag')
+      for entry_index, event_tag in enumerate(generator):
+        event_tag_identifier = identifiers.SerializedStreamIdentifier(
+            stream_number, entry_index)
+        event_tag.SetIdentifier(event_tag_identifier)
+
+        event_identifier = identifiers.SerializedStreamIdentifier(
+            event_tag.event_stream_number, event_tag.event_entry_index)
+        event_tag.SetEventIdentifier(event_identifier)
+
         yield event_tag
 
   def GetNumberOfAnalysisReports(self):
@@ -2845,8 +2731,8 @@ def WritePreprocessingInformation(self, knowledge_base):
 
     Raises:
       IOError: if the storage type does not support writing preprocess
-          information or the storage file is closed or read-only or
-          if the preprocess information stream already exists.
+          information or the storage file is closed or read-only or
+          if the preprocess information stream already exists.
     """
     if not self._is_open:
       raise IOError(u'Unable to write to closed storage file.')
@@ -3095,7 +2981,7 @@ def CheckTaskReadyForMerge(self, task):
 
     Raises:
       IOError: if the storage type is not supported or
-          if the temporary path for the task storage does not exist.
+          if the temporary path for the task storage does not exist.
     """
     if self._storage_type != definitions.STORAGE_TYPE_SESSION:
       raise IOError(u'Unsupported storage type.')
@@ -3139,7 +3025,7 @@ def CreateTaskStorage(self, task):
 
     Raises:
      IOError: if the storage type is not supported or
-          if the temporary path for the task storage does not exist.
+          if the temporary path for the task storage does not exist.
    """
    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
      raise IOError(u'Unsupported storage type.')
@@ -3247,7 +3133,7 @@ def PrepareMergeTaskStorage(self, task):
 
    Raises:
      IOError: if the storage type is not supported or
-          if the temporary path for the task storage does not exist.
+          if the temporary path for the task storage does not exist.
    """
    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
      raise IOError(u'Unsupported storage type.')
@@ -3308,10 +3194,9 @@ def StartMergeTaskStorage(self, task):
 
    Raises:
      IOError: if the storage file cannot be opened or
-          if the storage type is not supported or
-          if the temporary path for the task storage does not exist or
-          if the temporary path for the task storage doe not refers to
-          a file.
+          if the storage type is not supported or
+          if the temporary path for the task storage does not exist or
+          if the temporary path for the task storage does not refer to a file.
""" if self._storage_type != definitions.STORAGE_TYPE_SESSION: raise IOError(u'Unsupported storage type.') @@ -3332,7 +3217,7 @@ def StartTaskStorage(self): Raises: IOError: if the storage type is not supported or - if the temporary path for the task storage already exists. + if the temporary path for the task storage already exists. """ if self._storage_type != definitions.STORAGE_TYPE_SESSION: raise IOError(u'Unsupported storage type.') @@ -3355,7 +3240,7 @@ def StopTaskStorage(self, abort=False): Raises: IOError: if the storage type is not supported or - if the temporary path for the task storage does not exist. + if the temporary path for the task storage does not exist. """ if self._storage_type != definitions.STORAGE_TYPE_SESSION: raise IOError(u'Unsupported storage type.') @@ -3386,7 +3271,7 @@ def WritePreprocessingInformation(self, knowledge_base): Raises: IOError: if the storage type does not support writing preprocessing - information or when the storage writer is closed. + information or when the storage writer is closed. """ if not self._storage_file: raise IOError(u'Unable to write to closed storage writer.') @@ -3404,7 +3289,7 @@ def WriteSessionCompletion(self, aborted=False): Raises: IOError: if the storage type is not supported or - when the storage writer is closed. + when the storage writer is closed. """ if not self._storage_file: raise IOError(u'Unable to write to closed storage writer.') @@ -3421,7 +3306,7 @@ def WriteSessionStart(self): Raises: IOError: if the storage type is not supported or - when the storage writer is closed. + when the storage writer is closed. """ if not self._storage_file: raise IOError(u'Unable to write to closed storage writer.') @@ -3440,7 +3325,7 @@ def WriteTaskCompletion(self, aborted=False): Raises: IOError: if the storage type is not supported or - when the storage writer is closed. + when the storage writer is closed. """ if not self._storage_file: raise IOError(u'Unable to write to closed storage writer.') @@ -3457,7 +3342,7 @@ def WriteTaskStart(self): Raises: IOError: if the storage type is not supported or - when the storage writer is closed. + when the storage writer is closed. """ if not self._storage_file: raise IOError(u'Unable to write to closed storage writer.') diff --git a/test_data/end_to_end/dynamic.log b/test_data/end_to_end/dynamic.log index 773f0f8a0c..49bd846ee5 100644 --- a/test_data/end_to_end/dynamic.log +++ b/test_data/end_to_end/dynamic.log @@ -1,23 +1,23 @@ datetime,timestamp_desc,source,source_long,message,parser,display_name,tag -2016-01-22T07:52:33+00:00,Content Modification Time,LOG,Log File,[client pid: 30840] INFO No change in [/etc/netgroup]. 
Done,syslog,OS:/tmp/test/test_data/syslog,- -2016-01-22T07:52:33+00:00,Content Modification Time,LOG,Log File,[client pid: 30840] INFO No new content in ímynd.dd.,syslog,OS:/tmp/test/test_data/syslog,- -2016-01-22T07:53:01+00:00,Content Modification Time,LOG,Cron log,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051,syslog,OS:/tmp/test/test_data/syslog,- -2016-01-22T07:54:01+00:00,Content Modification Time,LOG,Cron log,Cron ran: /sbin/status.mycheck) for user: root pid: 31067,syslog,OS:/tmp/test/test_data/syslog,- -2016-01-22T07:54:01+00:00,Content Modification Time,LOG,Cron log,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068,syslog,OS:/tmp/test/test_data/syslog,- -2016-01-22T07:54:32+00:00,Content Modification Time,LOG,Log File,[Job] `cron.daily' terminated,syslog,OS:/tmp/test/test_data/syslog,- -2016-02-06T15:16:30+00:00,Content Modification Time,LOG,Log File,[process pid: 2085] Test message with single character day,syslog,OS:/tmp/test/test_data/syslog,- -2016-02-29T01:15:43+00:00,Content Modification Time,LOG,Log File,[---] testing leap year in parsing events take place in 2012 ---,syslog,OS:/tmp/test/test_data/syslog,- -2016-03-23T23:01:18+00:00,Content Modification Time,LOG,Log File,[somrandomexe pid: 19] This syslog message has a fractional value for seconds.,syslog,OS:/tmp/test/test_data/syslog,- -2016-03-23T23:01:18+00:00,Content Modification Time,LOG,Log File,[somrandomexe pid: 1915] This syslog message is brought to you by me (and not the other guy),syslog,OS:/tmp/test/test_data/syslog,- -2016-10-16T15:13:43+00:00,ctime,FILE,OS ctime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- -2016-10-16T15:13:43+00:00,ctime,FILE,OS ctime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- -2016-10-16T15:13:43+00:00,mtime,FILE,OS mtime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- -2016-10-16T15:13:43+00:00,mtime,FILE,OS mtime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- -2016-10-16T15:13:44+00:00,atime,FILE,OS atime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- -2016-10-16T15:13:44+00:00,atime,FILE,OS atime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- -2016-11-18T01:15:20+00:00,Content Modification Time,LOG,Log File,[aprocess pid: 10100] This is a multi-line message that screws up many syslog parsers.,syslog,OS:/tmp/test/test_data/syslog,- -2016-11-18T01:15:43+00:00,Content Modification Time,LOG,Log File,[---] last message repeated 5 times ---,syslog,OS:/tmp/test/test_data/syslog,repeated -2016-11-18T08:30:20+00:00,Content Modification Time,LOG,Log File,[kernel] [997.390602] sda2: rw=0 want=65 limit=2,syslog,OS:/tmp/test/test_data/syslog,- -2016-11-18T08:31:20+00:00,Content Modification Time,LOG,Log File,[kernel] [998.390602] sda2: rw=0 want=66 limit=2,syslog,OS:/tmp/test/test_data/syslog,- -2016-12-18T17:54:32+00:00,Content Modification Time,LOG,Log File,[anacron pid: 1234] No true exit can exist (124 job run),syslog,OS:/tmp/test/test_data/syslog,exit -2016-12-31T17:54:32+00:00,Content Modification Time,LOG,Log File,[/sbin/anacron pid: 1234] Another one just like this (124 job run),syslog,OS:/tmp/test/test_data/syslog,- +2012-01-22T07:52:33+00:00,Content Modification Time,LOG,Log File,[client pid: 30840] INFO No new content in ímynd.dd.,syslog,OS:/tmp/test/test_data/syslog,- +2012-01-22T07:52:33+00:00,Content Modification Time,LOG,Log File,[client 
pid: 30840] INFO No change in [/etc/netgroup]. Done,syslog,OS:/tmp/test/test_data/syslog,- +2012-01-22T07:53:01+00:00,Content Modification Time,LOG,Cron log,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051,syslog,OS:/tmp/test/test_data/syslog,- +2012-01-22T07:54:01+00:00,Content Modification Time,LOG,Cron log,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068,syslog,OS:/tmp/test/test_data/syslog,- +2012-01-22T07:54:01+00:00,Content Modification Time,LOG,Cron log,Cron ran: /sbin/status.mycheck) for user: root pid: 31067,syslog,OS:/tmp/test/test_data/syslog,- +2012-01-22T07:54:32+00:00,Content Modification Time,LOG,Log File,[Job] `cron.daily' terminated,syslog,OS:/tmp/test/test_data/syslog,- +2012-02-29T01:15:43+00:00,Content Modification Time,LOG,Log File,[---] testing leap year in parsing events take place in 2012 ---,syslog,OS:/tmp/test/test_data/syslog,- +2012-12-18T17:54:32+00:00,Content Modification Time,LOG,Log File,[anacron pid: 1234] No true exit can exist (124 job run),syslog,OS:/tmp/test/test_data/syslog,exit +2013-03-23T23:01:18+00:00,Content Modification Time,LOG,Log File,[somrandomexe pid: 1915] This syslog message is brought to you by me (and not the other guy),syslog,OS:/tmp/test/test_data/syslog,- +2013-03-23T23:01:18+00:00,Content Modification Time,LOG,Log File,[somrandomexe pid: 19] This syslog message has a fractional value for seconds.,syslog,OS:/tmp/test/test_data/syslog,- +2013-11-18T01:15:20+00:00,Content Modification Time,LOG,Log File,[aprocess pid: 10100] This is a multi-line message that screws up many syslog parsers.,syslog,OS:/tmp/test/test_data/syslog,- +2013-12-31T17:54:32+00:00,Content Modification Time,LOG,Log File,[/sbin/anacron pid: 1234] Another one just like this (124 job run),syslog,OS:/tmp/test/test_data/syslog,- +2014-02-06T15:16:30+00:00,Content Modification Time,LOG,Log File,[process pid: 2085] Test message with single character day,syslog,OS:/tmp/test/test_data/syslog,- +2014-11-18T01:15:43+00:00,Content Modification Time,LOG,Log File,[---] last message repeated 5 times ---,syslog,OS:/tmp/test/test_data/syslog,repeated +2014-11-18T08:30:20+00:00,Content Modification Time,LOG,Log File,[kernel] [997.390602] sda2: rw=0 want=65 limit=2,syslog,OS:/tmp/test/test_data/syslog,- +2014-11-18T08:31:20+00:00,Content Modification Time,LOG,Log File,[kernel] [998.390602] sda2: rw=0 want=66 limit=2,syslog,OS:/tmp/test/test_data/syslog,- +2017-01-22T12:50:22+00:00,ctime,FILE,OS ctime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- +2017-01-22T12:50:22+00:00,ctime,FILE,OS ctime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- +2017-01-22T12:50:22+00:00,mtime,FILE,OS mtime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- +2017-01-22T12:50:22+00:00,mtime,FILE,OS mtime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- +2017-01-22T12:50:23+00:00,atime,FILE,OS atime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- +2017-01-22T12:50:24+00:00,atime,FILE,OS atime,OS:/tmp/test/test_data/syslog Type: file,filestat,OS:/tmp/test/test_data/syslog,- diff --git a/test_data/end_to_end/json.log b/test_data/end_to_end/json.log index 6272c06b43..573ea01fee 100644 --- a/test_data/end_to_end/json.log +++ b/test_data/end_to_end/json.log @@ -1,23 +1,23 @@ -{"event_0": {"body": "INFO No change in [/etc/netgroup]. 
Done", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b6b05a44e5894600893157d56ca285a5", "data_type": "syslog:line", "_store_index": 0, "timestamp": 1453449153000000, "pid": 30840, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "client", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_1": {"body": "INFO No new content in \u00edmynd.dd.", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "33c5c3b331404fcb8ae0ed12706d257f", "data_type": "syslog:line", "_store_index": 1, "timestamp": 1453449153000000, "pid": 30840, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "client", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_2": {"body": "(root) CMD (touch /var/run/crond.somecheck)", "username": "root", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "3d7a996296714dd8b2cf52ead3d0a653", "data_type": "syslog:cron:task_run", "_store_index": 2, "timestamp": 1453449181000000, "pid": 31051, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "command": "touch /var/run/crond.somecheck", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "reporter": "CRON", "inode": 0, "__container_type__": "event"} -, "event_3": {"body": "(root) CMD (/sbin/status.mycheck))", "username": "root", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "621480ad919c428a8bc1427ad1c22268", "data_type": "syslog:cron:task_run", "_store_index": 3, "timestamp": 1453449241000000, "pid": 31067, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "command": "/sbin/status.mycheck)", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "reporter": "CRON", "inode": 0, "__container_type__": "event"} -, "event_4": {"body": "(root) CMD (touch /var/run/crond.somecheck)", "username": "root", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7f6d04ccd9d2418e9f804b6900fd49a3", "data_type": "syslog:cron:task_run", "_store_index": 4, "timestamp": 1453449241000000, "pid": 31068, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "command": "touch 
/var/run/crond.somecheck", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "reporter": "CRON", "inode": 0, "__container_type__": "event"} -, "event_5": {"body": "`cron.daily' terminated", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "2ccc05492ec1411783f1b67f5fe264cb", "data_type": "syslog:line", "_store_index": 5, "timestamp": 1453449272000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "Job", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_6": {"body": "Test message with single character day", "username": "-", "hostname": "victoria", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "81f8ba8705c24f4cb49bf048d3449ce8", "data_type": "syslog:line", "_store_index": 6, "timestamp": 1454771790000000, "pid": 2085, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "process", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_7": {"body": "testing leap year in parsing, events take place in 2012 ---", "username": "-", "hostname": ":", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "ee75539c5bec4770a82d46b476e7a114", "data_type": "syslog:line", "_store_index": 7, "timestamp": 1456708543000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "---", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_8": {"body": "This syslog message has a fractional value for seconds.", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7d9ae0a43b7f4f379917ae686388c305", "data_type": "syslog:line", "_store_index": 8, "timestamp": 1458774078000000, "pid": 19, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "somrandomexe", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_9": {"body": "This syslog message is brought to you by me (and not the other guy)", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "e87c7fc02497414d99506c08b5e815a7", "data_type": "syslog:line", "_store_index": 9, "timestamp": 1458774078000000, "pid": 1915, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", 
"timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "somrandomexe", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_10": {"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "639c744e25004ce0bbfe653c8919ad97", "data_type": "fs:stat", "_store_index": 11, "timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "_store_number": 1, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} -, "event_11": {"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "cf817ac91b124848b432197d9cdfb393", "data_type": "fs:stat", "_store_index": 11, "timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "_store_number": 2, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} -, "event_12": {"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "4293feb525354f84bf77171dfc1e736b", "data_type": "fs:stat", "_store_index": 10, "timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "_store_number": 1, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} -, "event_13": {"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b1cd4d4f5d2e456da010f092ee594d88", "data_type": "fs:stat", "_store_index": 10, "timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "_store_number": 2, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} -, "event_14": {"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "197d44ae30ab4d8ca97fcc4f95cf8a3d", "data_type": "fs:stat", "_store_index": 12, "timestamp": 1476630824000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", 
"offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "_store_number": 1, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} -, "event_15": {"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "14e53f82155540208addd65baf52efb2", "data_type": "fs:stat", "_store_index": 12, "timestamp": 1476630824000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "_store_number": 2, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} -, "event_16": {"body": "This is a multi-line message that screws up\n\tmany syslog parsers.", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "70607b0510464ab2a0e630aa562a784b", "data_type": "syslog:line", "_store_index": 13, "timestamp": 1479431720000000, "pid": 10100, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "aprocess", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_17": {"body": "last message repeated 5 times ---", "username": "-", "hostname": ":", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "9c61b48e327a460aba7c54ad3d69cbe6", "data_type": "syslog:line", "_store_index": 14, "timestamp": 1479431743000000, "parser": "syslog", "__type__": "AttributeContainer", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "timestamp_desc": "Content Modification Time", "tag": {"comment": "Tag applied by tagging analysis plugin.", "event_uuid": "9c61b48e327a460aba7c54ad3d69cbe6", "labels": [{"__type__": "bytes", "stream": "repeated"}], "__type__": "AttributeContainer", "__container_type__": "event_tag"}, "reporter": "---", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "filename": "/tmp/test/test_data/syslog", "inode": 0, "__container_type__": "event"} -, "event_18": {"body": "[997.390602] sda2: rw=0, want=65, limit=2", "username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "15dcc4c388f640558217f89a884c3cf9", "data_type": "syslog:line", "_store_index": 15, "timestamp": 1479457820000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "_store_number": 2, "reporter": "kernel", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_19": {"body": "[998.390602] sda2: rw=0, want=66, limit=2", "username": "-", "hostname": 
"victoria", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "9408c2b7c740426c8633f0a2749d647b", "data_type": "syslog:line", "_store_index": 16, "timestamp": 1479457880000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "kernel", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} -, "event_20": {"body": "No true exit can exist (124 job run)", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "0a536db8a8f549c983328dcf748e6292", "data_type": "syslog:line", "_store_index": 17, "timestamp": 1482083672000000, "pid": 1234, "parser": "syslog", "__type__": "AttributeContainer", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "timestamp_desc": "Content Modification Time", "tag": {"comment": "Tag applied by tagging analysis plugin.", "event_uuid": "0a536db8a8f549c983328dcf748e6292", "labels": [{"__type__": "bytes", "stream": "exit"}], "__type__": "AttributeContainer", "__container_type__": "event_tag"}, "reporter": "anacron", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "filename": "/tmp/test/test_data/syslog", "inode": 0, "__container_type__": "event"} -, "event_21": {"body": "Another one just like this (124 job run)", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "c8b6ca0fbcc942068a7bb042af4fe410", "data_type": "syslog:line", "_store_index": 18, "timestamp": 1483206872000000, "pid": 1234, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "/sbin/anacron", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} +{"event_0": {"body": "INFO No new content in \u00edmynd.dd.", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b772f7c47bd74cada4958882fd584235", "data_type": "syslog:line", "reporter": "client", "__type__": "AttributeContainer", "parser": "syslog", "pid": 30840, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1327218753000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_1": {"body": "INFO No change in [/etc/netgroup]. 
Done", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "f29c517962ed429ab6b9beb36d4a6e7b", "data_type": "syslog:line", "reporter": "client", "__type__": "AttributeContainer", "parser": "syslog", "pid": 30840, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1327218753000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_2": {"body": "(root) CMD (touch /var/run/crond.somecheck)", "username": "root", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7ecc0ac55129481786d6dcb44ed65cfb", "data_type": "syslog:cron:task_run", "reporter": "CRON", "__type__": "AttributeContainer", "parser": "syslog", "pid": 31051, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "command": "touch /var/run/crond.somecheck", "timestamp": 1327218781000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_3": {"body": "(root) CMD (touch /var/run/crond.somecheck)", "username": "root", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "5de7ebfff90645118be6b38428cfb0f9", "data_type": "syslog:cron:task_run", "reporter": "CRON", "__type__": "AttributeContainer", "parser": "syslog", "pid": 31068, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "command": "touch /var/run/crond.somecheck", "timestamp": 1327218841000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_4": {"body": "(root) CMD (/sbin/status.mycheck))", "username": "root", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7c6163426d3946eebc8c2f72d5f38491", "data_type": "syslog:cron:task_run", "reporter": "CRON", "__type__": "AttributeContainer", "parser": "syslog", "pid": 31067, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "command": "/sbin/status.mycheck)", "timestamp": 1327218841000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_5": {"body": "`cron.daily' terminated", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "f2acc98c322b432b9bb83b0602e15663", "data_type": "syslog:line", "reporter": "Job", "timestamp": 1327218872000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} +, "event_6": {"body": "testing leap year in 
parsing, events take place in 2012 ---", "hostname": ":", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "97730b0dcce945a8962d052b366e707d", "data_type": "syslog:line", "reporter": "---", "timestamp": 1330478143000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} +, "event_7": {"body": "No true exit can exist (124 job run)", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "96181c13094540399a4c3bd10ce8cf3a", "data_type": "syslog:line", "reporter": "anacron", "__type__": "AttributeContainer", "parser": "syslog", "pid": 1234, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "tag": {"comment": "Tag applied by tagging analysis plugin.Tag applied by tagging analysis plugin.", "event_stream_number": 2, "labels": [{"__type__": "bytes", "stream": "exit"}], "__type__": "AttributeContainer", "event_entry_index": 7, "__container_type__": "event_tag"}, "timestamp": 1355853272000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_8": {"body": "This syslog message is brought to you by me (and not the other guy)", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "4e41c8877b074a26a0b30fa1ac93ffc0", "data_type": "syslog:line", "reporter": "somrandomexe", "__type__": "AttributeContainer", "parser": "syslog", "pid": 1915, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1364079678000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_9": {"body": "This syslog message has a fractional value for seconds.", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "efd4517523f94d459be68adb22cfe5b6", "data_type": "syslog:line", "reporter": "somrandomexe", "__type__": "AttributeContainer", "parser": "syslog", "pid": 19, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1364079678000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_10": {"body": "This is a multi-line message that screws up\n\tmany syslog parsers.", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "8024613e5914418c8eb29920c4d71742", "data_type": "syslog:line", "reporter": "aprocess", "__type__": "AttributeContainer", "parser": "syslog", "pid": 10100, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1384737320000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", 
"location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_11": {"body": "Another one just like this (124 job run)", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "88b622a8772d443c8081bf5937e701ac", "data_type": "syslog:line", "reporter": "/sbin/anacron", "__type__": "AttributeContainer", "parser": "syslog", "pid": 1234, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1388512472000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +, "event_12": {"body": "Test message with single character day", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "85116abe7d734047867d3e8310336ba5", "data_type": "syslog:line", "reporter": "process", "__type__": "AttributeContainer", "parser": "syslog", "pid": 2085, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1391699790000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "hostname": "victoria", "__container_type__": "event"} +, "event_13": {"body": "last message repeated 5 times ---", "hostname": ":", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b712672143fa4b3babee4adace1350d2", "data_type": "syslog:line", "reporter": "---", "timestamp": 1416273343000000, "parser": "syslog", "__type__": "AttributeContainer", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "timestamp_desc": "Content Modification Time", "tag": {"comment": "Tag applied by tagging analysis plugin.Tag applied by tagging analysis plugin.", "event_stream_number": 2, "labels": [{"__type__": "bytes", "stream": "repeated"}], "__type__": "AttributeContainer", "event_entry_index": 13, "__container_type__": "event_tag"}, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "filename": "/tmp/test/test_data/syslog", "inode": 0, "__container_type__": "event"} +, "event_14": {"body": "[997.390602] sda2: rw=0, want=65, limit=2", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "191199c1d15542e5abc794e9c20c8077", "data_type": "syslog:line", "reporter": "kernel", "timestamp": 1416299420000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} +, "event_15": {"body": "[998.390602] sda2: rw=0, want=66, limit=2", "hostname": "victoria", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "f2b98e2782ce404c8686bd7ab1959b2f", "data_type": "syslog:line", "reporter": "kernel", "timestamp": 1416299480000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "offset": 0, "pathspec": {"type_indicator": 
"OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event"} +, "event_16": {"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "1944adacf18d4cbba60473f0bcfb36db", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} +, "event_17": {"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7a3b8a5983744af6b85d7da5628e17cc", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} +, "event_18": {"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "cf0a3d87a128414d8b840ad66eece27e", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} +, "event_19": {"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "f7340e3eab664f87adc14c4059cea7c0", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} +, "event_20": {"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "1b6677c64cd846f0ac19e9154b7dfe1e", "data_type": "fs:stat", "timestamp": 1485089423000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} +, "event_21": {"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": 
"2802d34626b4464cbac7f25bd903e251", "data_type": "fs:stat", "timestamp": 1485089424000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "inode": 0, "__container_type__": "event", "file_entry_type": 3} } \ No newline at end of file diff --git a/test_data/end_to_end/json_line.log b/test_data/end_to_end/json_line.log index 612c191b9d..c3379bb9f9 100644 --- a/test_data/end_to_end/json_line.log +++ b/test_data/end_to_end/json_line.log @@ -1,22 +1,22 @@ -{"body": "INFO No change in [/etc/netgroup]. Done", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b6b05a44e5894600893157d56ca285a5", "data_type": "syslog:line", "_store_index": 0, "timestamp": 1453449153000000, "pid": 30840, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "client", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[client, pid: 30840] INFO No change in [/etc/netgroup]. Done", "inode": 0, "__container_type__": "event"} -{"body": "INFO No new content in \u00edmynd.dd.", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "33c5c3b331404fcb8ae0ed12706d257f", "data_type": "syslog:line", "_store_index": 1, "timestamp": 1453449153000000, "pid": 30840, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "client", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[client, pid: 30840] INFO No new content in \u00edmynd.dd.", "inode": 0, "__container_type__": "event"} -{"username": "root", "_store_index": 2, "parser": "syslog", "pid": 31051, "message": "Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051", "inode": 0, "__container_type__": "event", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "3d7a996296714dd8b2cf52ead3d0a653", "hostname": "myhostname.myhost.com", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "body": "(root) CMD (touch /var/run/crond.somecheck)", "data_type": "syslog:cron:task_run", "reporter": "CRON", "timestamp": 1453449181000000, "offset": 0, "timestamp_desc": "Content Modification Time", "_store_number": 2, "command": "touch /var/run/crond.somecheck"} -{"username": "root", "_store_index": 3, "parser": "syslog", "pid": 31067, "message": "Cron ran: /sbin/status.mycheck) for user: root pid: 31067", "inode": 0, "__container_type__": "event", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": 
"621480ad919c428a8bc1427ad1c22268", "hostname": "myhostname.myhost.com", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "body": "(root) CMD (/sbin/status.mycheck))", "data_type": "syslog:cron:task_run", "reporter": "CRON", "timestamp": 1453449241000000, "offset": 0, "timestamp_desc": "Content Modification Time", "_store_number": 2, "command": "/sbin/status.mycheck)"} -{"username": "root", "_store_index": 4, "parser": "syslog", "pid": 31068, "message": "Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068", "inode": 0, "__container_type__": "event", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7f6d04ccd9d2418e9f804b6900fd49a3", "hostname": "myhostname.myhost.com", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "body": "(root) CMD (touch /var/run/crond.somecheck)", "data_type": "syslog:cron:task_run", "reporter": "CRON", "timestamp": 1453449241000000, "offset": 0, "timestamp_desc": "Content Modification Time", "_store_number": 2, "command": "touch /var/run/crond.somecheck"} -{"body": "`cron.daily' terminated", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "2ccc05492ec1411783f1b67f5fe264cb", "data_type": "syslog:line", "_store_index": 5, "timestamp": 1453449272000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "Job", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[Job] `cron.daily' terminated", "inode": 0, "__container_type__": "event"} -{"body": "Test message with single character day", "username": "-", "hostname": "victoria", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "81f8ba8705c24f4cb49bf048d3449ce8", "data_type": "syslog:line", "_store_index": 6, "timestamp": 1454771790000000, "pid": 2085, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "process", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[process, pid: 2085] Test message with single character day", "inode": 0, "__container_type__": "event"} -{"body": "testing leap year in parsing, events take place in 2012 ---", "username": "-", "hostname": ":", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "ee75539c5bec4770a82d46b476e7a114", "data_type": "syslog:line", "_store_index": 7, "timestamp": 1456708543000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "---", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[---] 
testing leap year in parsing, events take place in 2012 ---", "inode": 0, "__container_type__": "event"} -{"body": "This syslog message has a fractional value for seconds.", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7d9ae0a43b7f4f379917ae686388c305", "data_type": "syslog:line", "_store_index": 8, "timestamp": 1458774078000000, "pid": 19, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "somrandomexe", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[somrandomexe, pid: 19] This syslog message has a fractional value for seconds.", "inode": 0, "__container_type__": "event"} -{"body": "This syslog message is brought to you by me (and not the other guy)", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "e87c7fc02497414d99506c08b5e815a7", "data_type": "syslog:line", "_store_index": 9, "timestamp": 1458774078000000, "pid": 1915, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "somrandomexe", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[somrandomexe, pid: 1915] This syslog message is brought to you by me (and not the other guy)", "inode": 0, "__container_type__": "event"} -{"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "639c744e25004ce0bbfe653c8919ad97", "data_type": "fs:stat", "_store_index": 11, "timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "_store_number": 1, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} -{"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "cf817ac91b124848b432197d9cdfb393", "data_type": "fs:stat", "_store_index": 11, "timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "_store_number": 2, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} -{"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "4293feb525354f84bf77171dfc1e736b", "data_type": "fs:stat", "_store_index": 10, 
"timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "_store_number": 1, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} -{"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b1cd4d4f5d2e456da010f092ee594d88", "data_type": "fs:stat", "_store_index": 10, "timestamp": 1476630823000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "_store_number": 2, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} -{"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "197d44ae30ab4d8ca97fcc4f95cf8a3d", "data_type": "fs:stat", "_store_index": 12, "timestamp": 1476630824000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "_store_number": 1, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} -{"username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "14e53f82155540208addd65baf52efb2", "data_type": "fs:stat", "_store_index": 12, "timestamp": 1476630824000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "_store_number": 2, "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} -{"body": "This is a multi-line message that screws up\n\tmany syslog parsers.", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "70607b0510464ab2a0e630aa562a784b", "data_type": "syslog:line", "_store_index": 13, "timestamp": 1479431720000000, "pid": 10100, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "aprocess", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[aprocess, pid: 10100] This is a multi-line message that 
screws up\tmany syslog parsers.", "inode": 0, "__container_type__": "event"} -{"body": "last message repeated 5 times ---", "username": "-", "hostname": ":", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "9c61b48e327a460aba7c54ad3d69cbe6", "data_type": "syslog:line", "_store_index": 14, "timestamp": 1479431743000000, "parser": "syslog", "__type__": "AttributeContainer", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "timestamp_desc": "Content Modification Time", "tag": {"comment": "Tag applied by tagging analysis plugin.", "event_uuid": "9c61b48e327a460aba7c54ad3d69cbe6", "labels": [{"__type__": "bytes", "stream": "repeated"}], "__type__": "AttributeContainer", "__container_type__": "event_tag"}, "reporter": "---", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[---] last message repeated 5 times ---", "filename": "/tmp/test/test_data/syslog", "inode": 0, "__container_type__": "event"} -{"body": "[997.390602] sda2: rw=0, want=65, limit=2", "username": "-", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "15dcc4c388f640558217f89a884c3cf9", "data_type": "syslog:line", "_store_index": 15, "timestamp": 1479457820000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "_store_number": 2, "reporter": "kernel", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[kernel] [997.390602] sda2: rw=0, want=65, limit=2", "inode": 0, "__container_type__": "event"} -{"body": "[998.390602] sda2: rw=0, want=66, limit=2", "username": "-", "hostname": "victoria", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "9408c2b7c740426c8633f0a2749d647b", "data_type": "syslog:line", "_store_index": 16, "timestamp": 1479457880000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "kernel", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[kernel] [998.390602] sda2: rw=0, want=66, limit=2", "inode": 0, "__container_type__": "event"} -{"username": "-", "_store_index": 17, "parser": "syslog", "pid": 1234, "tag": {"comment": "Tag applied by tagging analysis plugin.", "event_uuid": "0a536db8a8f549c983328dcf748e6292", "labels": [{"__type__": "bytes", "stream": "exit"}], "__type__": "AttributeContainer", "__container_type__": "event_tag"}, "message": "[anacron, pid: 1234] No true exit can exist (124 job run)", "inode": 0, "__container_type__": "event", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "0a536db8a8f549c983328dcf748e6292", "hostname": "myhostname.myhost.com", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "body": "No true exit can exist (124 job run)", "data_type": "syslog:line", "reporter": "anacron", "timestamp": 1482083672000000, "offset": 0, "timestamp_desc": "Content Modification Time", 
"_store_number": 2} -{"body": "Another one just like this (124 job run)", "username": "-", "hostname": "myhostname.myhost.com", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "c8b6ca0fbcc942068a7bb042af4fe410", "data_type": "syslog:line", "_store_index": 18, "timestamp": 1483206872000000, "pid": 1234, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "reporter": "/sbin/anacron", "_store_number": 2, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[/sbin/anacron, pid: 1234] Another one just like this (124 job run)", "inode": 0, "__container_type__": "event"} +{"body": "INFO No new content in \u00edmynd.dd.", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b772f7c47bd74cada4958882fd584235", "data_type": "syslog:line", "reporter": "client", "__type__": "AttributeContainer", "parser": "syslog", "pid": 30840, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1327218753000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[client, pid: 30840] INFO No new content in \u00edmynd.dd.", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "INFO No change in [/etc/netgroup]. Done", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "f29c517962ed429ab6b9beb36d4a6e7b", "data_type": "syslog:line", "reporter": "client", "__type__": "AttributeContainer", "parser": "syslog", "pid": 30840, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1327218753000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[client, pid: 30840] INFO No change in [/etc/netgroup]. 
Done", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "(root) CMD (touch /var/run/crond.somecheck)", "username": "root", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7ecc0ac55129481786d6dcb44ed65cfb", "data_type": "syslog:cron:task_run", "reporter": "CRON", "__type__": "AttributeContainer", "parser": "syslog", "pid": 31051, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "command": "touch /var/run/crond.somecheck", "timestamp": 1327218781000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "(root) CMD (touch /var/run/crond.somecheck)", "username": "root", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "5de7ebfff90645118be6b38428cfb0f9", "data_type": "syslog:cron:task_run", "reporter": "CRON", "__type__": "AttributeContainer", "parser": "syslog", "pid": 31068, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "command": "touch /var/run/crond.somecheck", "timestamp": 1327218841000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "(root) CMD (/sbin/status.mycheck))", "username": "root", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7c6163426d3946eebc8c2f72d5f38491", "data_type": "syslog:cron:task_run", "reporter": "CRON", "__type__": "AttributeContainer", "parser": "syslog", "pid": 31067, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "command": "/sbin/status.mycheck)", "timestamp": 1327218841000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "Cron ran: /sbin/status.mycheck) for user: root pid: 31067", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "`cron.daily' terminated", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "f2acc98c322b432b9bb83b0602e15663", "data_type": "syslog:line", "reporter": "Job", "timestamp": 1327218872000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[Job] `cron.daily' terminated", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "testing leap year in parsing, events take place in 2012 ---", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "97730b0dcce945a8962d052b366e707d", "data_type": "syslog:line", "reporter": "---", "timestamp": 1330478143000000, "parser": "syslog", 
"__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[---] testing leap year in parsing, events take place in 2012 ---", "inode": 0, "hostname": ":", "__container_type__": "event"} +{"body": "No true exit can exist (124 job run)", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "96181c13094540399a4c3bd10ce8cf3a", "data_type": "syslog:line", "reporter": "anacron", "__type__": "AttributeContainer", "parser": "syslog", "pid": 1234, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "tag": {"comment": "Tag applied by tagging analysis plugin.Tag applied by tagging analysis plugin.", "event_stream_number": 2, "labels": [{"__type__": "bytes", "stream": "exit"}], "__type__": "AttributeContainer", "event_entry_index": 7, "__container_type__": "event_tag"}, "timestamp": 1355853272000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[anacron, pid: 1234] No true exit can exist (124 job run)", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "This syslog message is brought to you by me (and not the other guy)", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "4e41c8877b074a26a0b30fa1ac93ffc0", "data_type": "syslog:line", "reporter": "somrandomexe", "__type__": "AttributeContainer", "parser": "syslog", "pid": 1915, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1364079678000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[somrandomexe, pid: 1915] This syslog message is brought to you by me (and not the other guy)", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "This syslog message has a fractional value for seconds.", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "efd4517523f94d459be68adb22cfe5b6", "data_type": "syslog:line", "reporter": "somrandomexe", "__type__": "AttributeContainer", "parser": "syslog", "pid": 19, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1364079678000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[somrandomexe, pid: 19] This syslog message has a fractional value for seconds.", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "This is a multi-line message that screws up\n\tmany syslog parsers.", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "8024613e5914418c8eb29920c4d71742", "data_type": "syslog:line", "reporter": "aprocess", "__type__": "AttributeContainer", "parser": "syslog", "pid": 10100, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1384737320000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": 
"/tmp/test/test_data/syslog"}, "message": "[aprocess, pid: 10100] This is a multi-line message that screws up\tmany syslog parsers.", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "Another one just like this (124 job run)", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "88b622a8772d443c8081bf5937e701ac", "data_type": "syslog:line", "reporter": "/sbin/anacron", "__type__": "AttributeContainer", "parser": "syslog", "pid": 1234, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1388512472000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[/sbin/anacron, pid: 1234] Another one just like this (124 job run)", "inode": 0, "hostname": "myhostname.myhost.com", "__container_type__": "event"} +{"body": "Test message with single character day", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "85116abe7d734047867d3e8310336ba5", "data_type": "syslog:line", "reporter": "process", "__type__": "AttributeContainer", "parser": "syslog", "pid": 2085, "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "timestamp": 1391699790000000, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[process, pid: 2085] Test message with single character day", "inode": 0, "hostname": "victoria", "__container_type__": "event"} +{"body": "last message repeated 5 times ---", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "b712672143fa4b3babee4adace1350d2", "data_type": "syslog:line", "reporter": "---", "timestamp": 1416273343000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "tag": {"comment": "Tag applied by tagging analysis plugin.Tag applied by tagging analysis plugin.", "event_stream_number": 2, "labels": [{"__type__": "bytes", "stream": "repeated"}], "__type__": "AttributeContainer", "event_entry_index": 13, "__container_type__": "event_tag"}, "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[---] last message repeated 5 times ---", "inode": 0, "hostname": ":", "__container_type__": "event"} +{"body": "[997.390602] sda2: rw=0, want=65, limit=2", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "191199c1d15542e5abc794e9c20c8077", "data_type": "syslog:line", "reporter": "kernel", "timestamp": 1416299420000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[kernel] [997.390602] sda2: rw=0, want=65, limit=2", "inode": 0, "__container_type__": "event"} +{"body": "[998.390602] sda2: rw=0, want=66, limit=2", "sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": 
"f2b98e2782ce404c8686bd7ab1959b2f", "data_type": "syslog:line", "reporter": "kernel", "timestamp": 1416299480000000, "parser": "syslog", "__type__": "AttributeContainer", "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "Content Modification Time", "offset": 0, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "[kernel] [998.390602] sda2: rw=0, want=66, limit=2", "inode": 0, "hostname": "victoria", "__container_type__": "event"} +{"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "1944adacf18d4cbba60473f0bcfb36db", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} +{"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "7a3b8a5983744af6b85d7da5628e17cc", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "ctime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} +{"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "cf0a3d87a128414d8b840ad66eece27e", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} +{"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "f7340e3eab664f87adc14c4059cea7c0", "data_type": "fs:stat", "timestamp": 1485089422000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "mtime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} +{"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "1b6677c64cd846f0ac19e9154b7dfe1e", "data_type": "fs:stat", "timestamp": 1485089423000000, "is_allocated": true, 
"parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} +{"sha256_hash": "1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4", "display_name": "OS:/tmp/test/test_data/syslog", "uuid": "2802d34626b4464cbac7f25bd903e251", "data_type": "fs:stat", "timestamp": 1485089424000000, "is_allocated": true, "parser": "filestat", "__type__": "AttributeContainer", "offset": 0, "filename": "/tmp/test/test_data/syslog", "timestamp_desc": "atime", "file_system_type": "OS", "file_size": {"values": [1509], "__type__": "tuple"}, "pathspec": {"type_indicator": "OS", "__type__": "PathSpec", "location": "/tmp/test/test_data/syslog"}, "message": "OS:/tmp/test/test_data/syslog Type: file", "inode": 0, "__container_type__": "event", "file_entry_type": 3} diff --git a/test_data/end_to_end/l2tcsv.log b/test_data/end_to_end/l2tcsv.log index a603ebd4c3..d967e876fb 100644 --- a/test_data/end_to_end/l2tcsv.log +++ b/test_data/end_to_end/l2tcsv.log @@ -1,23 +1,23 @@ date,time,timezone,MACB,source,sourcetype,type,user,host,short,desc,version,filename,inode,notes,format,extra -01/22/2016,07:52:33,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[client pid: 30840] INFO No change in [/etc/netgroup]. Done,[client pid: 30840] INFO No change in [/etc/netgroup]. Done,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -01/22/2016,07:52:33,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[client pid: 30840] INFO No new content in ímynd.dd.,[client pid: 30840] INFO No new content in ímynd.dd.,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -01/22/2016,07:53:01,UTC,M...,LOG,Cron log,Content Modification Time,root,myhostname.myhost.com,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051,2,OS:/tmp/test/test_data/syslog,-,-,syslog,reporter: CRON sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -01/22/2016,07:54:01,UTC,M...,LOG,Cron log,Content Modification Time,root,myhostname.myhost.com,Cron ran: /sbin/status.mycheck) for user: root pid: 31067,Cron ran: /sbin/status.mycheck) for user: root pid: 31067,2,OS:/tmp/test/test_data/syslog,-,-,syslog,reporter: CRON sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -01/22/2016,07:54:01,UTC,M...,LOG,Cron log,Content Modification Time,root,myhostname.myhost.com,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068,2,OS:/tmp/test/test_data/syslog,-,-,syslog,reporter: CRON sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -01/22/2016,07:54:32,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[Job] `cron.daily' terminated,[Job] `cron.daily' terminated,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -02/06/2016,15:16:30,UTC,M...,LOG,Log File,Content Modification Time,-,victoria,[process 
pid: 2085] Test message with single character day,[process pid: 2085] Test message with single character day,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -02/29/2016,01:15:43,UTC,M...,LOG,Log File,Content Modification Time,-,:,[---] testing leap year in parsing events take place in 2012 ---,[---] testing leap year in parsing events take place in 2012 ---,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -03/23/2016,23:01:18,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[somrandomexe pid: 19] This syslog message has a fractional value for seconds.,[somrandomexe pid: 19] This syslog message has a fractional value for seconds.,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -03/23/2016,23:01:18,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[somrandomexe pid: 1915] This syslog message is brought to you by me (and no...,[somrandomexe pid: 1915] This syslog message is brought to you by me (and not the other guy),2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -10/16/2016,15:13:43,UTC,..C.,FILE,OS ctime,ctime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -10/16/2016,15:13:43,UTC,..C.,FILE,OS ctime,ctime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -10/16/2016,15:13:43,UTC,M...,FILE,OS mtime,mtime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -10/16/2016,15:13:43,UTC,M...,FILE,OS mtime,mtime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -10/16/2016,15:13:44,UTC,.A..,FILE,OS atime,atime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -10/16/2016,15:13:44,UTC,.A..,FILE,OS atime,atime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -11/18/2016,01:15:20,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[aprocess pid: 10100] This is a multi-line message that screws up many syslo...,[aprocess pid: 10100] This is a multi-line message that screws up many syslog parsers.,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 
-11/18/2016,01:15:43,UTC,M...,LOG,Log File,Content Modification Time,-,:,[---] last message repeated 5 times ---,[---] last message repeated 5 times ---,2,OS:/tmp/test/test_data/syslog,-,repeated,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -11/18/2016,08:30:20,UTC,M...,LOG,Log File,Content Modification Time,-,-,[kernel] [997.390602] sda2: rw=0 want=65 limit=2,[kernel] [997.390602] sda2: rw=0 want=65 limit=2,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -11/18/2016,08:31:20,UTC,M...,LOG,Log File,Content Modification Time,-,victoria,[kernel] [998.390602] sda2: rw=0 want=66 limit=2,[kernel] [998.390602] sda2: rw=0 want=66 limit=2,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -12/18/2016,17:54:32,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[anacron pid: 1234] No true exit can exist (124 job run),[anacron pid: 1234] No true exit can exist (124 job run),2,OS:/tmp/test/test_data/syslog,-,exit,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 -12/31/2016,17:54:32,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[/sbin/anacron pid: 1234] Another one just like this (124 job run),[/sbin/anacron pid: 1234] Another one just like this (124 job run),2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2012,07:52:33,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[client pid: 30840] INFO No new content in ímynd.dd.,[client pid: 30840] INFO No new content in ímynd.dd.,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2012,07:52:33,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[client pid: 30840] INFO No change in [/etc/netgroup]. Done,[client pid: 30840] INFO No change in [/etc/netgroup]. 
Done,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2012,07:53:01,UTC,M...,LOG,Cron log,Content Modification Time,root,myhostname.myhost.com,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051,2,OS:/tmp/test/test_data/syslog,-,-,syslog,reporter: CRON sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2012,07:54:01,UTC,M...,LOG,Cron log,Content Modification Time,root,myhostname.myhost.com,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068,Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068,2,OS:/tmp/test/test_data/syslog,-,-,syslog,reporter: CRON sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2012,07:54:01,UTC,M...,LOG,Cron log,Content Modification Time,root,myhostname.myhost.com,Cron ran: /sbin/status.mycheck) for user: root pid: 31067,Cron ran: /sbin/status.mycheck) for user: root pid: 31067,2,OS:/tmp/test/test_data/syslog,-,-,syslog,reporter: CRON sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2012,07:54:32,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[Job] `cron.daily' terminated,[Job] `cron.daily' terminated,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +02/29/2012,01:15:43,UTC,M...,LOG,Log File,Content Modification Time,-,:,[---] testing leap year in parsing events take place in 2012 ---,[---] testing leap year in parsing events take place in 2012 ---,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +12/18/2012,17:54:32,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[anacron pid: 1234] No true exit can exist (124 job run),[anacron pid: 1234] No true exit can exist (124 job run),2,OS:/tmp/test/test_data/syslog,-,exit,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +03/23/2013,23:01:18,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[somrandomexe pid: 1915] This syslog message is brought to you by me (and no...,[somrandomexe pid: 1915] This syslog message is brought to you by me (and not the other guy),2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +03/23/2013,23:01:18,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[somrandomexe pid: 19] This syslog message has a fractional value for seconds.,[somrandomexe pid: 19] This syslog message has a fractional value for seconds.,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +11/18/2013,01:15:20,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[aprocess pid: 10100] This is a multi-line message that screws up many syslo...,[aprocess pid: 10100] This is a multi-line message that screws up many syslog parsers.,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +12/31/2013,17:54:32,UTC,M...,LOG,Log File,Content Modification Time,-,myhostname.myhost.com,[/sbin/anacron pid: 1234] Another one just like this (124 job run),[/sbin/anacron pid: 1234] Another one just like this (124 job 
run),2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +02/06/2014,15:16:30,UTC,M...,LOG,Log File,Content Modification Time,-,victoria,[process pid: 2085] Test message with single character day,[process pid: 2085] Test message with single character day,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +11/18/2014,01:15:43,UTC,M...,LOG,Log File,Content Modification Time,-,:,[---] last message repeated 5 times ---,[---] last message repeated 5 times ---,2,OS:/tmp/test/test_data/syslog,-,repeated,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +11/18/2014,08:30:20,UTC,M...,LOG,Log File,Content Modification Time,-,-,[kernel] [997.390602] sda2: rw=0 want=65 limit=2,[kernel] [997.390602] sda2: rw=0 want=65 limit=2,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +11/18/2014,08:31:20,UTC,M...,LOG,Log File,Content Modification Time,-,victoria,[kernel] [998.390602] sda2: rw=0 want=66 limit=2,[kernel] [998.390602] sda2: rw=0 want=66 limit=2,2,OS:/tmp/test/test_data/syslog,-,-,syslog,sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2017,12:50:22,UTC,..C.,FILE,OS ctime,ctime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2017,12:50:22,UTC,..C.,FILE,OS ctime,ctime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2017,12:50:22,UTC,M...,FILE,OS mtime,mtime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2017,12:50:22,UTC,M...,FILE,OS mtime,mtime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2017,12:50:23,UTC,.A..,FILE,OS atime,atime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +01/22/2017,12:50:24,UTC,.A..,FILE,OS atime,atime,-,-,/tmp/test/test_data/syslog,OS:/tmp/test/test_data/syslog Type: file,2,OS:/tmp/test/test_data/syslog,-,-,filestat,file_size: (1509 ) file_system_type: OS is_allocated: True sha256_hash: 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 diff --git a/test_data/end_to_end/l2ttln.log b/test_data/end_to_end/l2ttln.log index 3027f287fe..9a08348d97 100644 --- a/test_data/end_to_end/l2ttln.log +++ b/test_data/end_to_end/l2ttln.log @@ -1,23 +1,23 @@ Time|Source|Host|User|Description|TZ|Notes -1453449153|LOG|myhostname.myhost.com|-|2016-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No change in [/etc/netgroup]. 
Done|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1453449153|LOG|myhostname.myhost.com|-|2016-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No new content in ímynd.dd.|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1453449181|LOG|myhostname.myhost.com|root|2016-01-22T07:53:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1453449241|LOG|myhostname.myhost.com|root|2016-01-22T07:54:01+00:00; Content Modification Time; Cron ran: /sbin/status.mycheck) for user: root pid: 31067|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1453449241|LOG|myhostname.myhost.com|root|2016-01-22T07:54:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1453449272|LOG|myhostname.myhost.com|-|2016-01-22T07:54:32+00:00; Content Modification Time; [Job] `cron.daily' terminated|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1454771790|LOG|victoria|-|2016-02-06T15:16:30+00:00; Content Modification Time; [process, pid: 2085] Test message with single character day|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1456708543|LOG|:|-|2016-02-29T01:15:43+00:00; Content Modification Time; [---] testing leap year in parsing, events take place in 2012 ---|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1458774078|LOG|myhostname.myhost.com|-|2016-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 19] This syslog message has a fractional value for seconds.|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1458774078|LOG|myhostname.myhost.com|-|2016-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 1915] This syslog message is brought to you by me (and not the other guy)|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1476630824|FILE|-|-|2016-10-16T15:13:44+00:00; atime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1476630824|FILE|-|-|2016-10-16T15:13:44+00:00; atime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1479431720|LOG|myhostname.myhost.com|-|2016-11-18T01:15:20+00:00; Content Modification Time; [aprocess, pid: 10100] This is a multi-line message that screws up many syslog parsers.|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1479431743|LOG|:|-|2016-11-18T01:15:43+00:00; Content Modification Time; [---] last message repeated 5 times ---|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1479457820|LOG|-|-|2016-11-18T08:30:20+00:00; Content Modification Time; [kernel] [997.390602] sda2: rw=0, want=65, limit=2|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1479457880|LOG|victoria|-|2016-11-18T08:31:20+00:00; Content Modification Time; [kernel] [998.390602] sda2: rw=0, want=66, limit=2|UTC|File: OS:/tmp/test/test_data/syslog inode: - 
-1482083672|LOG|myhostname.myhost.com|-|2016-12-18T17:54:32+00:00; Content Modification Time; [anacron, pid: 1234] No true exit can exist (124 job run)|UTC|File: OS:/tmp/test/test_data/syslog inode: - -1483206872|LOG|myhostname.myhost.com|-|2016-12-31T17:54:32+00:00; Content Modification Time; [/sbin/anacron, pid: 1234] Another one just like this (124 job run)|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1327218753|LOG|myhostname.myhost.com|-|2012-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No new content in ímynd.dd.|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1327218753|LOG|myhostname.myhost.com|-|2012-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No change in [/etc/netgroup]. Done|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1327218781|LOG|myhostname.myhost.com|root|2012-01-22T07:53:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1327218841|LOG|myhostname.myhost.com|root|2012-01-22T07:54:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1327218841|LOG|myhostname.myhost.com|root|2012-01-22T07:54:01+00:00; Content Modification Time; Cron ran: /sbin/status.mycheck) for user: root pid: 31067|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1327218872|LOG|myhostname.myhost.com|-|2012-01-22T07:54:32+00:00; Content Modification Time; [Job] `cron.daily' terminated|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1330478143|LOG|:|-|2012-02-29T01:15:43+00:00; Content Modification Time; [---] testing leap year in parsing, events take place in 2012 ---|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1355853272|LOG|myhostname.myhost.com|-|2012-12-18T17:54:32+00:00; Content Modification Time; [anacron, pid: 1234] No true exit can exist (124 job run)|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1364079678|LOG|myhostname.myhost.com|-|2013-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 1915] This syslog message is brought to you by me (and not the other guy)|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1364079678|LOG|myhostname.myhost.com|-|2013-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 19] This syslog message has a fractional value for seconds.|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1384737320|LOG|myhostname.myhost.com|-|2013-11-18T01:15:20+00:00; Content Modification Time; [aprocess, pid: 10100] This is a multi-line message that screws up many syslog parsers.|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1388512472|LOG|myhostname.myhost.com|-|2013-12-31T17:54:32+00:00; Content Modification Time; [/sbin/anacron, pid: 1234] Another one just like this (124 job run)|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1391699790|LOG|victoria|-|2014-02-06T15:16:30+00:00; Content Modification Time; [process, pid: 2085] Test message with single character day|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1416273343|LOG|:|-|2014-11-18T01:15:43+00:00; Content Modification Time; [---] last message repeated 5 times ---|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1416299420|LOG|-|-|2014-11-18T08:30:20+00:00; Content Modification Time; [kernel] [997.390602] sda2: rw=0, want=65, limit=2|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1416299480|LOG|victoria|-|2014-11-18T08:31:20+00:00; Content Modification Time; [kernel] [998.390602] sda2: rw=0, 
want=66, limit=2|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1485089423|FILE|-|-|2017-01-22T12:50:23+00:00; atime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - +1485089424|FILE|-|-|2017-01-22T12:50:24+00:00; atime; OS:/tmp/test/test_data/syslog Type: file|UTC|File: OS:/tmp/test/test_data/syslog inode: - diff --git a/test_data/end_to_end/rawpy.log b/test_data/end_to_end/rawpy.log index 77fd38c286..cdcb05bd10 100644 --- a/test_data/end_to_end/rawpy.log +++ b/test_data/end_to_end/rawpy.log @@ -1,24 +1,21 @@ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-01-22T07:52:33+00:00 + 2012-01-22T07:52:33+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 0 - {_store_number} 2 - {body} INFO No change in [/etc/netgroup]. Done + {body} INFO No new content in ímynd.dd. {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1453449153000000 + {timestamp} 1327218753000000 {timestamp_desc} Content Modification Time - {username} - - {uuid} b6b05a44e5894600893157d56ca285a5 + {uuid} b772f7c47bd74cada4958882fd584235 [Additional attributes]: {pid} 30840 @@ -26,25 +23,22 @@ {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-01-22T07:52:33+00:00 + 2012-01-22T07:52:33+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 1 - {_store_number} 2 - {body} INFO No new content in ímynd.dd. + {body} INFO No change in [/etc/netgroup]. 
Done {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1453449153000000 + {timestamp} 1327218753000000 {timestamp_desc} Content Modification Time - {username} - - {uuid} 33c5c3b331404fcb8ae0ed12706d257f + {uuid} f29c517962ed429ab6b9beb36d4a6e7b [Additional attributes]: {pid} 30840 @@ -52,14 +46,12 @@ {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-01-22T07:53:01+00:00 + 2012-01-22T07:53:01+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 2 - {_store_number} 2 {body} (root) CMD (touch /var/run/crond.somecheck) {data_type} syslog:cron:task_run {display_name} OS:/tmp/test/test_data/syslog @@ -67,10 +59,10 @@ {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1453449181000000 + {timestamp} 1327218781000000 {timestamp_desc} Content Modification Time {username} root - {uuid} 3d7a996296714dd8b2cf52ead3d0a653 + {uuid} 7ecc0ac55129481786d6dcb44ed65cfb [Additional attributes]: {command} touch /var/run/crond.somecheck @@ -79,68 +71,62 @@ {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-01-22T07:54:01+00:00 + 2012-01-22T07:54:01+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 3 - {_store_number} 2 - {body} (root) CMD (/sbin/status.mycheck)) + {body} (root) CMD (touch /var/run/crond.somecheck) {data_type} syslog:cron:task_run {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1453449241000000 + {timestamp} 1327218841000000 {timestamp_desc} Content Modification Time {username} root - {uuid} 621480ad919c428a8bc1427ad1c22268 + {uuid} 5de7ebfff90645118be6b38428cfb0f9 [Additional attributes]: - {command} /sbin/status.mycheck) - {pid} 31067 + {command} touch /var/run/crond.somecheck + {pid} 31068 {reporter} CRON {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-01-22T07:54:01+00:00 + 2012-01-22T07:54:01+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 4 - {_store_number} 2 - {body} (root) CMD (touch /var/run/crond.somecheck) + {body} (root) CMD (/sbin/status.mycheck)) {data_type} syslog:cron:task_run {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1453449241000000 + {timestamp} 1327218841000000 {timestamp_desc} Content Modification Time {username} root - {uuid} 7f6d04ccd9d2418e9f804b6900fd49a3 + {uuid} 7c6163426d3946eebc8c2f72d5f38491 [Additional attributes]: - {command} touch /var/run/crond.somecheck - {pid} 31068 + {command} /sbin/status.mycheck) + {pid} 31067 {reporter} CRON {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-01-22T07:54:32+00:00 + 2012-01-22T07:54:32+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - 
{_store_index} 5 - {_store_number} 2 {body} `cron.daily' terminated {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog @@ -148,423 +134,374 @@ {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1453449272000000 + {timestamp} 1327218872000000 {timestamp_desc} Content Modification Time - {username} - - {uuid} 2ccc05492ec1411783f1b67f5fe264cb + {uuid} f2acc98c322b432b9bb83b0602e15663 [Additional attributes]: {reporter} Job {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-02-06T15:16:30+00:00 + 2012-02-29T01:15:43+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 6 - {_store_number} 2 - {body} Test message with single character day + {body} testing leap year in parsing, events take place in 2012 --- {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog - {hostname} victoria + {hostname} : {offset} 0 {parser} syslog - {timestamp} 1454771790000000 + {timestamp} 1330478143000000 {timestamp_desc} Content Modification Time - {username} - - {uuid} 81f8ba8705c24f4cb49bf048d3449ce8 + {uuid} 97730b0dcce945a8962d052b366e707d [Additional attributes]: - {pid} 2085 - {reporter} process + {reporter} --- {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-02-29T01:15:43+00:00 + 2012-12-18T17:54:32+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 7 - {_store_number} 2 - {body} testing leap year in parsing, events take place in 2012 --- + {body} No true exit can exist (124 job run) {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog - {hostname} : + {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1456708543000000 + {timestamp} 1355853272000000 {timestamp_desc} Content Modification Time - {username} - - {uuid} ee75539c5bec4770a82d46b476e7a114 + {uuid} 96181c13094540399a4c3bd10ce8cf3a [Additional attributes]: - {reporter} --- + {pid} 1234 + {reporter} anacron {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-03-23T23:01:18+00:00 + 2013-03-23T23:01:18+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 8 - {_store_number} 2 - {body} This syslog message has a fractional value for seconds. 
+ {body} This syslog message is brought to you by me (and not the other guy) {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1458774078000000 + {timestamp} 1364079678000000 {timestamp_desc} Content Modification Time - {username} - - {uuid} 7d9ae0a43b7f4f379917ae686388c305 + {uuid} 4e41c8877b074a26a0b30fa1ac93ffc0 [Additional attributes]: - {pid} 19 + {pid} 1915 {reporter} somrandomexe {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-03-23T23:01:18+00:00 + 2013-03-23T23:01:18+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 9 - {_store_number} 2 - {body} This syslog message is brought to you by me (and not the other guy) + {body} This syslog message has a fractional value for seconds. {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {hostname} myhostname.myhost.com {offset} 0 {parser} syslog - {timestamp} 1458774078000000 + {timestamp} 1364079678000000 {timestamp_desc} Content Modification Time - {username} - - {uuid} e87c7fc02497414d99506c08b5e815a7 + {uuid} efd4517523f94d459be68adb22cfe5b6 [Additional attributes]: - {pid} 1915 + {pid} 19 {reporter} somrandomexe {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-10-16T15:13:43+00:00 + 2013-11-18T01:15:20+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 11 - {_store_number} 1 - {data_type} fs:stat + {body} This is a multi-line message that screws up + many syslog parsers. 
+ {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog + {hostname} myhostname.myhost.com {offset} 0 - {parser} filestat - {timestamp} 1476630823000000 - {timestamp_desc} ctime - {username} - - {uuid} 639c744e25004ce0bbfe653c8919ad97 + {parser} syslog + {timestamp} 1384737320000000 + {timestamp_desc} Content Modification Time + {uuid} 8024613e5914418c8eb29920c4d71742 [Additional attributes]: - {file_entry_type} 3 - {file_size} (1509,) - {file_system_type} OS - {is_allocated} True + {pid} 10100 + {reporter} aprocess {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-10-16T15:13:43+00:00 + 2013-12-31T17:54:32+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 11 - {_store_number} 2 - {data_type} fs:stat + {body} Another one just like this (124 job run) + {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog + {hostname} myhostname.myhost.com {offset} 0 - {parser} filestat - {timestamp} 1476630823000000 - {timestamp_desc} ctime - {username} - - {uuid} cf817ac91b124848b432197d9cdfb393 + {parser} syslog + {timestamp} 1388512472000000 + {timestamp_desc} Content Modification Time + {uuid} 88b622a8772d443c8081bf5937e701ac [Additional attributes]: - {file_entry_type} 3 - {file_size} (1509,) - {file_system_type} OS - {is_allocated} True + {pid} 1234 + {reporter} /sbin/anacron {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-10-16T15:13:43+00:00 + 2014-02-06T15:16:30+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 10 - {_store_number} 1 - {data_type} fs:stat + {body} Test message with single character day + {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog + {hostname} victoria {offset} 0 - {parser} filestat - {timestamp} 1476630823000000 - {timestamp_desc} mtime - {username} - - {uuid} 4293feb525354f84bf77171dfc1e736b + {parser} syslog + {timestamp} 1391699790000000 + {timestamp_desc} Content Modification Time + {uuid} 85116abe7d734047867d3e8310336ba5 [Additional attributes]: - {file_entry_type} 3 - {file_size} (1509,) - {file_system_type} OS - {is_allocated} True + {pid} 2085 + {reporter} process {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-10-16T15:13:43+00:00 + 2014-11-18T01:15:43+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 10 - {_store_number} 2 - {data_type} fs:stat + {body} last message repeated 5 times --- + {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog + {hostname} : {offset} 0 - {parser} filestat - {timestamp} 1476630823000000 - {timestamp_desc} mtime - {username} - - {uuid} b1cd4d4f5d2e456da010f092ee594d88 + {parser} syslog + {timestamp} 1416273343000000 + {timestamp_desc} Content Modification Time + {uuid} b712672143fa4b3babee4adace1350d2 [Additional attributes]: - {file_entry_type} 3 - {file_size} (1509,) - {file_system_type} OS - {is_allocated} True + {reporter} --- {sha256_hash} 
1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-10-16T15:13:44+00:00 + 2014-11-18T08:30:20+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 12 - {_store_number} 1 - {data_type} fs:stat + {body} [997.390602] sda2: rw=0, want=65, limit=2 + {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {offset} 0 - {parser} filestat - {timestamp} 1476630824000000 - {timestamp_desc} atime - {username} - - {uuid} 197d44ae30ab4d8ca97fcc4f95cf8a3d + {parser} syslog + {timestamp} 1416299420000000 + {timestamp_desc} Content Modification Time + {uuid} 191199c1d15542e5abc794e9c20c8077 [Additional attributes]: - {file_entry_type} 3 - {file_size} (1509,) - {file_system_type} OS - {is_allocated} True + {reporter} kernel {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-10-16T15:13:44+00:00 + 2014-11-18T08:31:20+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 12 - {_store_number} 2 - {data_type} fs:stat + {body} [998.390602] sda2: rw=0, want=66, limit=2 + {data_type} syslog:line {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog + {hostname} victoria {offset} 0 - {parser} filestat - {timestamp} 1476630824000000 - {timestamp_desc} atime - {username} - - {uuid} 14e53f82155540208addd65baf52efb2 + {parser} syslog + {timestamp} 1416299480000000 + {timestamp_desc} Content Modification Time + {uuid} f2b98e2782ce404c8686bd7ab1959b2f [Additional attributes]: - {file_entry_type} 3 - {file_size} (1509,) - {file_system_type} OS - {is_allocated} True + {reporter} kernel {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-11-18T01:15:20+00:00 + 2017-01-22T12:50:22+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 13 - {_store_number} 2 - {body} This is a multi-line message that screws up - many syslog parsers. 
- {data_type} syslog:line + {data_type} fs:stat {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog - {hostname} myhostname.myhost.com {offset} 0 - {parser} syslog - {timestamp} 1479431720000000 - {timestamp_desc} Content Modification Time - {username} - - {uuid} 70607b0510464ab2a0e630aa562a784b + {parser} filestat + {timestamp} 1485089422000000 + {timestamp_desc} ctime + {uuid} 1944adacf18d4cbba60473f0bcfb36db [Additional attributes]: - {pid} 10100 - {reporter} aprocess + {file_entry_type} 3 + {file_size} (1509,) + {file_system_type} OS + {is_allocated} True {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-11-18T01:15:43+00:00 + 2017-01-22T12:50:22+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 14 - {_store_number} 2 - {body} last message repeated 5 times --- - {data_type} syslog:line + {data_type} fs:stat {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog - {hostname} : {offset} 0 - {parser} syslog - {timestamp} 1479431743000000 - {timestamp_desc} Content Modification Time - {username} - - {uuid} 9c61b48e327a460aba7c54ad3d69cbe6 + {parser} filestat + {timestamp} 1485089422000000 + {timestamp_desc} ctime + {uuid} 7a3b8a5983744af6b85d7da5628e17cc [Additional attributes]: - {reporter} --- + {file_entry_type} 3 + {file_size} (1509,) + {file_system_type} OS + {is_allocated} True {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-11-18T08:30:20+00:00 + 2017-01-22T12:50:22+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 15 - {_store_number} 2 - {body} [997.390602] sda2: rw=0, want=65, limit=2 - {data_type} syslog:line + {data_type} fs:stat {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog {offset} 0 - {parser} syslog - {timestamp} 1479457820000000 - {timestamp_desc} Content Modification Time - {username} - - {uuid} 15dcc4c388f640558217f89a884c3cf9 + {parser} filestat + {timestamp} 1485089422000000 + {timestamp_desc} mtime + {uuid} cf0a3d87a128414d8b840ad66eece27e [Additional attributes]: - {reporter} kernel + {file_entry_type} 3 + {file_size} (1509,) + {file_system_type} OS + {is_allocated} True {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-11-18T08:31:20+00:00 + 2017-01-22T12:50:22+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 16 - {_store_number} 2 - {body} [998.390602] sda2: rw=0, want=66, limit=2 - {data_type} syslog:line + {data_type} fs:stat {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog - {hostname} victoria {offset} 0 - {parser} syslog - {timestamp} 1479457880000000 - {timestamp_desc} Content Modification Time - {username} - - {uuid} 9408c2b7c740426c8633f0a2749d647b + {parser} filestat + {timestamp} 1485089422000000 + {timestamp_desc} mtime + {uuid} f7340e3eab664f87adc14c4059cea7c0 [Additional attributes]: - {reporter} kernel + {file_entry_type} 3 + {file_size} (1509,) + {file_system_type} OS + {is_allocated} True {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 
+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-12-18T17:54:32+00:00 + 2017-01-22T12:50:23+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 17 - {_store_number} 2 - {body} No true exit can exist (124 job run) - {data_type} syslog:line + {data_type} fs:stat {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog - {hostname} myhostname.myhost.com {offset} 0 - {parser} syslog - {timestamp} 1482083672000000 - {timestamp_desc} Content Modification Time - {username} - - {uuid} 0a536db8a8f549c983328dcf748e6292 + {parser} filestat + {timestamp} 1485089423000000 + {timestamp_desc} atime + {uuid} 1b6677c64cd846f0ac19e9154b7dfe1e [Additional attributes]: - {pid} 1234 - {reporter} anacron + {file_entry_type} 3 + {file_size} (1509,) + {file_system_type} OS + {is_allocated} True {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+- [Timestamp]: - 2016-12-31T17:54:32+00:00 + 2017-01-22T12:50:24+00:00 [Pathspec]: type: OS, location: /tmp/test/test_data/syslog [Reserved attributes]: - {_store_index} 18 - {_store_number} 2 - {body} Another one just like this (124 job run) - {data_type} syslog:line + {data_type} fs:stat {display_name} OS:/tmp/test/test_data/syslog {filename} /tmp/test/test_data/syslog - {hostname} myhostname.myhost.com {offset} 0 - {parser} syslog - {timestamp} 1483206872000000 - {timestamp_desc} Content Modification Time - {username} - - {uuid} c8b6ca0fbcc942068a7bb042af4fe410 + {parser} filestat + {timestamp} 1485089424000000 + {timestamp_desc} atime + {uuid} 2802d34626b4464cbac7f25bd903e251 [Additional attributes]: - {pid} 1234 - {reporter} /sbin/anacron + {file_entry_type} 3 + {file_size} (1509,) + {file_system_type} OS + {is_allocated} True {sha256_hash} 1f0105612f6ad2d225d6bd9ba631148740e312598878adcd2b74098a3dab50c4 diff --git a/test_data/end_to_end/tln.log b/test_data/end_to_end/tln.log index 2193a8a853..686d1086f7 100644 --- a/test_data/end_to_end/tln.log +++ b/test_data/end_to_end/tln.log @@ -1,23 +1,23 @@ Time|Source|Host|User|Description -1453449153|LOG|myhostname.myhost.com|-|2016-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No change in [/etc/netgroup]. Done -1453449153|LOG|myhostname.myhost.com|-|2016-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No new content in ímynd.dd. 
-1453449181|LOG|myhostname.myhost.com|root|2016-01-22T07:53:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051 -1453449241|LOG|myhostname.myhost.com|root|2016-01-22T07:54:01+00:00; Content Modification Time; Cron ran: /sbin/status.mycheck) for user: root pid: 31067 -1453449241|LOG|myhostname.myhost.com|root|2016-01-22T07:54:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068 -1453449272|LOG|myhostname.myhost.com|-|2016-01-22T07:54:32+00:00; Content Modification Time; [Job] `cron.daily' terminated -1454771790|LOG|victoria|-|2016-02-06T15:16:30+00:00; Content Modification Time; [process, pid: 2085] Test message with single character day -1456708543|LOG|:|-|2016-02-29T01:15:43+00:00; Content Modification Time; [---] testing leap year in parsing, events take place in 2012 --- -1458774078|LOG|myhostname.myhost.com|-|2016-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 19] This syslog message has a fractional value for seconds. -1458774078|LOG|myhostname.myhost.com|-|2016-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 1915] This syslog message is brought to you by me (and not the other guy) -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file -1476630823|FILE|-|-|2016-10-16T15:13:43+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file -1476630824|FILE|-|-|2016-10-16T15:13:44+00:00; atime; OS:/tmp/test/test_data/syslog Type: file -1476630824|FILE|-|-|2016-10-16T15:13:44+00:00; atime; OS:/tmp/test/test_data/syslog Type: file -1479431720|LOG|myhostname.myhost.com|-|2016-11-18T01:15:20+00:00; Content Modification Time; [aprocess, pid: 10100] This is a multi-line message that screws up many syslog parsers. -1479431743|LOG|:|-|2016-11-18T01:15:43+00:00; Content Modification Time; [---] last message repeated 5 times --- -1479457820|LOG|-|-|2016-11-18T08:30:20+00:00; Content Modification Time; [kernel] [997.390602] sda2: rw=0, want=65, limit=2 -1479457880|LOG|victoria|-|2016-11-18T08:31:20+00:00; Content Modification Time; [kernel] [998.390602] sda2: rw=0, want=66, limit=2 -1482083672|LOG|myhostname.myhost.com|-|2016-12-18T17:54:32+00:00; Content Modification Time; [anacron, pid: 1234] No true exit can exist (124 job run) -1483206872|LOG|myhostname.myhost.com|-|2016-12-31T17:54:32+00:00; Content Modification Time; [/sbin/anacron, pid: 1234] Another one just like this (124 job run) +1327218753|LOG|myhostname.myhost.com|-|2012-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No new content in ímynd.dd. +1327218753|LOG|myhostname.myhost.com|-|2012-01-22T07:52:33+00:00; Content Modification Time; [client, pid: 30840] INFO No change in [/etc/netgroup]. 
Done +1327218781|LOG|myhostname.myhost.com|root|2012-01-22T07:53:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31051 +1327218841|LOG|myhostname.myhost.com|root|2012-01-22T07:54:01+00:00; Content Modification Time; Cron ran: touch /var/run/crond.somecheck for user: root pid: 31068 +1327218841|LOG|myhostname.myhost.com|root|2012-01-22T07:54:01+00:00; Content Modification Time; Cron ran: /sbin/status.mycheck) for user: root pid: 31067 +1327218872|LOG|myhostname.myhost.com|-|2012-01-22T07:54:32+00:00; Content Modification Time; [Job] `cron.daily' terminated +1330478143|LOG|:|-|2012-02-29T01:15:43+00:00; Content Modification Time; [---] testing leap year in parsing, events take place in 2012 --- +1355853272|LOG|myhostname.myhost.com|-|2012-12-18T17:54:32+00:00; Content Modification Time; [anacron, pid: 1234] No true exit can exist (124 job run) +1364079678|LOG|myhostname.myhost.com|-|2013-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 1915] This syslog message is brought to you by me (and not the other guy) +1364079678|LOG|myhostname.myhost.com|-|2013-03-23T23:01:18+00:00; Content Modification Time; [somrandomexe, pid: 19] This syslog message has a fractional value for seconds. +1384737320|LOG|myhostname.myhost.com|-|2013-11-18T01:15:20+00:00; Content Modification Time; [aprocess, pid: 10100] This is a multi-line message that screws up many syslog parsers. +1388512472|LOG|myhostname.myhost.com|-|2013-12-31T17:54:32+00:00; Content Modification Time; [/sbin/anacron, pid: 1234] Another one just like this (124 job run) +1391699790|LOG|victoria|-|2014-02-06T15:16:30+00:00; Content Modification Time; [process, pid: 2085] Test message with single character day +1416273343|LOG|:|-|2014-11-18T01:15:43+00:00; Content Modification Time; [---] last message repeated 5 times --- +1416299420|LOG|-|-|2014-11-18T08:30:20+00:00; Content Modification Time; [kernel] [997.390602] sda2: rw=0, want=65, limit=2 +1416299480|LOG|victoria|-|2014-11-18T08:31:20+00:00; Content Modification Time; [kernel] [998.390602] sda2: rw=0, want=66, limit=2 +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; ctime; OS:/tmp/test/test_data/syslog Type: file +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file +1485089422|FILE|-|-|2017-01-22T12:50:22+00:00; mtime; OS:/tmp/test/test_data/syslog Type: file +1485089423|FILE|-|-|2017-01-22T12:50:23+00:00; atime; OS:/tmp/test/test_data/syslog Type: file +1485089424|FILE|-|-|2017-01-22T12:50:24+00:00; atime; OS:/tmp/test/test_data/syslog Type: file diff --git a/test_data/pinfo_test.json.plaso b/test_data/pinfo_test.json.plaso index 0b6b13663cc72e4c6cd2269527a94dd83125dae8..30d9f72be1e138aaa1d868c8b1f8d5178e5be11a 100644 GIT binary patch delta 2798 zcmZ8jc{J4PAO4bMn8A!KTQRn=Wp9{>u_lvJlqLI;Y$HbYOm6-|yaYyYD%l^ZxUEmh-&t^PEqOaIG+pIpz=(F8}}_z#5A*#Rsxx zSY+@>;%VYv<;BYr&PwBs(4gxZax=#zbHk;08sVn$u8MaKlS{QYw5@AQFpAsCThK!S z-X*q*LR>G;VX$hR%c9-yP$<`Mi<`o~7iX_o(V{W|fF%R~4qD>eaX3#utRpVaIUrC` z`QWJBu)_FJkx=}5(IHXTCgmYd=Q~O8y1Bv-j*E6r29zY!lOmh>F+K9D%g=BR8MzAI z^25n_f?f_Ew0A!#73u6*Z%$7TOWzuxX+I~Cd85T0&8dI$ z(#?ytV@-%BqG0$;PYg8T9?A%v)g^%DJhQ!1r>mo@6zPDcq=f-1Q#Jz3Z7Upe@p==N z1bp?#LzN6ZhWysGr8pL=?hWs@9&QZ&>CW!EKInDH7(AhSXy@_>l-|-*z~g3O*H+qq z@soV26?wQOC4kQ#@ogZTF$$5QtPrD3Vta^q_EI2_ARd83nvMuqGv4=spwLOMXi@-x zTq$A}!#A2`O?P^sIc9*=OfRrUzz@Cxq*#+s8MyN67E-g`9l{mYWVjqVyS&wSA&MIp 
zX+&8Sj#JQ_Vqa`Ek=eP=fihkszXU!LO&rVC-2KdrA~eAtWy*9tVsB<`N84jEF( zBuqFPO)VCPRl+mDqU3o{IT2efF=U`sXooFS3*A(YSAggRH&}2Ph?dF!i99X=M|= z_{3O*tB|QM!Q>q$V|E`%MXn2ETAbNbOk&KOX1siugOv6XW+P>`Mbf924(}neE@S^?6I&TY`G?qiG2$Qmq(opEu-lx>PZf{40U6q3K~hC{BXGf)3e{v~W1WSj z(uHaJs$rmh4OuKEX)_XX(}#x%<~WPt75W{Olz96PW{?#qu+f^t;&%ctR>pYz!bI zw$_N_0Ywq?Z|B|Do>7DD7iWbz@(HX;q47F3@#&f@*Lc-|t7y>D!ei!Mwe}!wQjEX7$X7dKH+=k#XpGh4S_wn@2Z+ha&|UvnB_$YCkyuGrXEN*+E0O7d zXc3=a>=Lc?qRTWC%?XupC0zJ`=lSHjd#1zCMBQr&&Yd!H8?kGgZB*l&P?_b&SR-~8 zrQG!@%+HC zG4?oVQ~ZyiHkSOjrdZL+&L`oivaMfL)LpuL%U{`dQ^LGbEr}MoHMU=?`zCYLq?ooy zz0Z!TDw0QAx(xFds#@mp=q%ojsJ!Z@iw&W#_zC2Zn595efcwhI#U&|@GHPnD&DkE# zkYj=Bx3!{GyH9A zHen0^z)FZ4LHHE^OxHT^vs`H#<|rW zryDGVe4%j*hh)v=E=p|vc)L^v>$?5KBS!cfaH{5*`8why)mh%mos>*1GDSV^KaSF= zsT!O6$tkgWomCpKI~f+S63pKdmA}IIJ6Sr&)?7j_t2mtGamA_1)z#_0YzEV=#SiSW z*-GF53E?r`!YWYC^|aX5rq}YcNmTBLfs8sFNi9j>-{gk8qdO+u!o8>LUK zbGIZIMXc>!nQsoe?!N-{{IYF}HIf%joX;Ni+H%&Z)=H=|(`Y&oh7(LJ8+-ppm7BdR z*|lfQveG!9u(m<2CsVsG)GpNV*m-u!H+Z%`4xhk%@^%wLn8wME3bM)dE__t~O|nlx z`t>Qa)u{KTcd(XuD-*mDaC5{U~PlC@lzaVI-48+EIv83M1kUCd%5vLqVX zxf^9lrq8{)y2dzLHn587`=r}b>Go$LuCQEH{sQwAdl@#B)2zML1d_weh`fe~fz{B$ zh)9Ot{{jKTuJcP?(LUN&31`_6_;#;OWvJn(F58Lh8O?gv@BFK97pXe57)HNYHZ~p8 z)>1uVfb;gNE(p~s`!tCSwsLNN(aw7RGp=sl6Qv0VY&&>=YJSo%{%%w=%x^$(@ z9dxCtK9y2xI?oL|voA zt3UlbW8X&Z_L-STUK$?fesKQt3Wqs{fsyyne-=A}4Om6=z*YXci2?sVjZrrU_rMZN z2N=Q-ScoB$@C7U;_O}tS@54a*A^10b|Gvqf$wEN02oqjGAirw=V@ms*p1{9=jZvWl ze+c3i?Z32G4B=;ZPIv@?BM$2SqdqnOfc=*CgB1YWgWR!!gqsk7|BWWhA;hplKypa` e>Ui*o!}}@m`{MogXpK=P3E>TfcMO_pj%-oaa5y^Zp2@s!#D)m@oi&0RR9qpf%0{UQzE7 zV42DTMYG5)%S)7C<#0_&MCQE^zICBTSI*&BE8s=xzVNKuw#6$hL3f9tQlx%td-7Az ze8y>f{PN7M`=FN`;{hf&z3AGgI%6Z)8vY&Ck<$BAd;&iDyYNkZ?`^5m1tif=#=9r~EB)a}YeumNeV4?a{L^Bv~mp)zgRKFrxg%YPx#Nj)5^MS-9+aNK>-)W5PaH)W;8;U(LxN zmP8$dg%VmG!>kL#@wY~sq#Ok5UAdPIr8s}6m9E7j;KZjY8sSDo>~V<;jx^ZDNZ0i- zAxzOS$-0jz)`-MSOU`3uax{ zwwOAQ6%ER##r6(R}Cq0UvoK1D9pWVuM>^cmi0yg zrcTLwym*2z^YDtd{phgj)_ z$tASI(yNsxyn4evv#SMa%4gSHuEf+Eai@8`?e-IdJh%!j2Fj1$<~^y)UEB8kIn$?@ z&e*D)(t6w4y7sRDJbvZ58f$$Ubb)~2uC)H-eOn5>es<2A;qu5!uS}yc$o2-7&~I=s z2l2)md`($-Dn_=KzH1soV0*aTzc=JJez^u+voh2_+ISbw*Vh<4e-p{1KgC*(kpFI& zB&mOL^W}h)fJ$qLq_XnK(;f1(+Fs8z$Ez5S8*akj>(!9%ibtNMm5T4oAC8V8o^Y|< zeTwNzC`@tccn{1gH_N?;NnH;qTe+35Mf_2qHtuS^5=>m&=L|2w=;FIg#GSt^utW0E z5w~qFQ?NqLTu{1nH8BWB~Kz>th3P2c(%p9ZCSauK!* z0eNr$ql1`CXG!>Y-?B@4nG4%wjuRz{@E=#M;tn<84`wDuT3c;orLA$3chF=zk9Twy zRzEDm0~XY|nzfrQb>BVgHdg0+CG67YJ|xwkRXe7YO0G@aY`K0h(48RX+Oj!>=4?!Z zx9O{QYgFMb$|j53m@bOApDl83dWVLIvt%lE2rEs0m^0gq*3}w&yFO+eeGW~PQhf2` z;C;!NYt3Go#1h}RyLN8V2OoCeZVdajD+4#~TyYBhU{AU1vo!2;EZy1d^;)Z?x9 z?hKD0O*!=JtA<_}c4=;G5jG;?P?ioG7sXAYpBeC2+0azA@#9ci(#OR99$P!#zvKeB zlly1hHAC?(2EO}sLB6vk@jq`mTb2viiwk_)4;jAP2`nTysS_^kbi{_#h>EQWkQEX@Kh2M$Qo4Ws>V&ug z#zN*J!~Isw zUSezrsHHPHXT*=2K<<w@^ayP4SJm%&41Y`zwM)vPJ$ z%HzszJt2|sRbz)3F^(2Na8g`C5P@Yi9eM^Tq_i=p`3TQjXn1!AW{QsW9MF2!UM(46 zMSS~gN>A{B5@77{L(8f8{0`r)FZo1l7f3YF__9)Z=v9G(fYi6>O3REhtX4hyeQ*(5!JoAPF?HqpPEBTUXof%uawr5|2*QUT#y;x1Hbml+Hm^RUvBi77tTw>MUJ+s$3^X?LQ||*GTQ| zrs64B$BR{171Tlnin_48tRp16l}JECp5$IyEeiaST1%+hGESWKTX$D0x5xVQ1h^%f z5M=Ubk#~|#M)vF7x!iPGRYiFanTITWFF0UuA<=%Ol&uM&jw&0Rb@LE!{?2?mE|c{w zbk=t_m3*P3vO-ZcN-2g0hz=m(0}Q0Ew`U?^_7TBhNn_*>5o@(x7UTzovV~|x6VT=R z`Jb+J&u}o2)l0yQ9U!+8Mw%IZr?3{Q-A%isIdu5;3(#s2k-&7<<^HW4wsl&8fiQ`+ z&?}P(-N^BQp>h)BPABJXj7Dt zzWspUrk0k2{j%(&Rab+7sGGor=8o695u)_~k3TW%6qlXsb=caS-FJrp296$9XVH25D6#e8H^n)&&%-JhKI9fgNYs4&+i-Ap%jjzKJq@U zf=v>5guxB63DMohEwhOqryMyJ>mi8w@Cg2mKE5O9pg4K(>0`|Qjq7mP6ZjXVJ`#!Z z1IrvUeyq`_;FENXxJEEU=BU%}s{CJOedIRiAC2N7!2*9zCCn~Fw}(?_mp<-z^nQbf Y_y>og{`XY%ky^Mgb_FIwwxeSI1!h(bV*mgE 
diff --git a/test_data/psort_test.json.plaso b/test_data/psort_test.json.plaso index 1d16853c4dea915e1c12b4cbc463e9f2fe45f521..4e0bd7fea80551b7c0eda3c1a6b2e131ffe37912 100644 GIT binary patch literal 13175 zcmd^lWl){j(k|{E+#$FHcM0wq+@0VW+}(pikPuvg1Pksi!QF#9!7aeu%$!UJIWzP9 zxl(oCU8`#EAN{QE-u?FKepbs#f`Xv}0Rce)Nm{W=csW2uzXJTy1uQtgvNUqgGt_g? zqjPX}ULw|7o9ikD53qNQTTFZy)LOjB162jGjy` zcy85VF-f{>`YR+*O!EwNI&8#lSxGA<({BV_GaMWSZm+y?>KGZ_%|DfGdVY{%hyP)rJzwMdD z`tdAr!u7WD@(e*hG4?`{az{etU5U^9+W>{H_&k(rm{_`t z1T?W(^(z!f_N}M`_iD1?t4cV?XCSG}k>ulzGt}%~##sc#7#k)Wo|xV95;ZQX=O_scY0&27dcjAyx_5$p~H zHLN{>T!npjoVyO4Obw&8jTw}Awk7;U)YjUsyY!)N<^`)WUNUFYNp;dIlP|flRDGbC z1IwMRVDxyg>9-v&KL1$;xpG03cPej z3ch0OT^LJ9nw9``R;EyaAY{Sk&Rv}QS_H)jn;D;DyDlfs1>X&LtuM&&qq#1T(0}WE zWzS2iK|l)OpqK=fgesxVbqM7GVKEm%;wh(5vzJWo5w{H8o*rWZPE7~a`FuzN+W8k-|)WgN`f)$Y&URp>CN zdSBvo6Jz*hYRYmSF5_6mr3Lwh?@9<;U&Vx^n{MmiEY(d1EMn1Mr_>fTeXP#1vGL?% zc7E@)d#{!JF%;{t;rtMc&Isib{h2Ak?%GluY)?PuSzKk!$Xd-nGZiuPG)Eim3*BTp zb7vt!2p2oe*$BJOb-C;4<9Ku99)+Gu;rnK)!q+sYwQlEHD6_zPgO~Of*b1&Q2K4%Y zmZU>kt@Fkf5fr0k=`!kj6_RsF6&zsOpQOCsQ@n_k$DM3MJV~!ONbBZ>wvm^6{YI#+3)hU+pQ)jC?_GhQHs%Wy;wpA|f zmz5JK8CQt+m_5GKPNRb~aPcBSvq9wjjIThTmx~$1z3w1?O^jNrp|(zdgct`{$be;I zXJlh%Z2(A*|H+Ed5ex`z^l-xVaeYj%@v3zh+Q4M~MY>Y!8IE!6g8cItqJfG6`nUJD z7gI=WussLilrnIFka6d?89`c*!A&G07TY5{{l%PbbMb{a*fP-vF+++96JI#iS}|QS zc=?v2Zq_OxzB7xY%a#7h$iypMeRyK(fAK_1+$A9*2!M8g3=V_^SVoRURt`G$)^F?# zjC4$`42_)sqpg9it}YPJuO{kJml{O?goXhN^QrJ3C(xhyUn_zi2801lc#}eKQsCmC z1C(L_SYTg>c09KdD+A765Ygxra)&hfVe|b7cA#E>;d&%#wBK!w@3_#`>RuY!AbhK- z?*ddX4?pSVWK!j@^D>!hWM2e#3^LUy;-GR7WffRc>p7b(Qtw?k6dp(DS#)`%X_jQ6 zrUzj*MXl#ued!gG;1HSaS5hwDefVEZ=z#A!kp!qN1z0G5nef7GuZ{e1T0yG)7~qz9RG%06Q`=-)aM-pg#PD45Tq*kpHs%~8 zxefYkciw+7S%dg>>uXkL7=M88Yw7S9R0;k)45^tRYv82hc|1jp8?mz0E6eC{u! zu6EbzBukvsl-85sO5SSDd~+uVnRp zHTH8a`o_M*Tg3_6oc;a6CanSXj>W_~VWc8dkC8wsrNxax%VGO1<+_V!k^#7~7JQT* z;%#8C;;&;^hhG*zjr!;n8T#84vqy%@=~XJvLJ;Kpk!8`sPD1awAkXBEcf*Y)>}-hh zg&7eo;liscOH+isuFu(IJ-erdyDW$EKCja%Ip zYh(3Z>*wfVkiw8=NBDYqpcXEJNe->TZ#x+98B@}_Op(ln6qV=GWs2dE7igIMr<=Y& ztvQ4ejiaj%%-lktd+geFA=@`>edTJ5C2Vt??SQRy%Tl0XyeCZWQzBjJOkxxE{EQ%tiYqm-nw0C$AfJa z?!oMDrf z>4G$!!f8wxLdby!d0iOGbl+tmhwz(9rpn;Y)TAo5u*{yJfkZrg=QPB+eTXJtsLw+2 zd0Z=@*Lq8RWu1XJsL&hy`Hf<2$tXXyywO#CEyUXou zTH(uouj}iX%hf!^;vtO7Dpe06Jm71%`Lx;UJ1VRvn2pP|UXv3T)ME?PT8~q`xZ6_^ zK|69oC64l9f#cNW|owvZX5;wF(xsfLV>QodxXYiUay?Df21GKnc zUygv^ZtHNBJ=Bd-Fsco#9dB$FTdyIU4qOaR}iswy~7C*w-n}*Q*HC3#FQ?kqr zpe-4o+wDegfw0}#t8J33`AlIhQ7d4!5l|aOGxgVLLgQ^JK{A_RZcsUGoU|Y&q4b~X z6_-hA^i0@jYzAT*Ayq@zm+PKsdNqWaE3IrgXm9SwAAQrXI_WSTzoC17@tW6dreQ$t zlvdf{MVb>qnGzYA1st=80msO+9y31wU#hiAw}+V=b#s~lJ{~{ z;6dO7!KCVKky=*|I?}Bf0`zOGG&C{bB{v!2SbcGvlB|mmgYEER}adS#{s5xcEwlYj+=)HyLKPn12eBCecyBpVXYuaYEmQ zN!g#8sJ!yy##wRNK-frW~Y+|(cL3fD69^}=ok6yOs(%berAXL;O$j1T=^`21 zsrS*wk&DRH8v060sFM3-wQMW2qm-3jfE_>N0iIN729Wgh)mjpqN=5Z}dR#NSCb$$V zTql%=3bEqt^rYXq{=34|g;G5gIrR`%K4*fxi1Hzf4m2Wzt#_-Z#g3A22~LGxoxllS zajS#+F~Hi+KSoS!Topz7ZC?0%nppW-r%_a?@79AiULx#SDI@W|(ia-)0vC2QOX<$6 z=Vq%m1;1tv^Z@ExZd~F#?Adh-sp(bozrl9d55P#a)E!+8IT~BugL`Il%h&XB;HuBbV@31$&(D^3bK{=(!{E5DY!eu(t;nM#PBWA zGnHb5b&kO0r%8iyjJ1qxF-M)*ZsJi%Mh@IeYWkHq-j$JF^`OluM4h~6I>nXLHpQgM z$v)9@%2&;yJZoU%dj}x_eWAF9S+)-yr}t7E?A^=5=PdMeY$@>LEb~PvK{p#Ef%I=u zne{(RY}eW00zK#ada1@&Ot4!Dv8W1i{t~j(#szk1-Z%Wn?8@uhMCoWh|Flh>p*k+s zW^BO`7S3iP~$-S5dcqAU+k%yY_M@w&DZE0g+U)d?POBaX*QWi7+ts>wnteFCeNV`u4B`fr zosWcSI?pV{#t^FsN?XwMd0A=@D;rZs!aJSSJ2Rj}>Q&<4AhBsHl#U3HnNZ>K`?sI< zxp|i%rNv0{4b4NQY2GwG3N0xB#gbK=w!HzUk@f$cINn<>IL6_;<5UzJ99xM)pbCS6*^qnc?PnA&V@yyfNPdU0@L2GxWw ztvG}_eeHR+Zv4jS=E(8m=al-Pl=}Mai6J(m%s_z;>hjj^mN)2jODYnbV=pCob9Ycf 
zgcA%17ofLG`MkXDKV93F@y$k#vn|C!eb?*RZc5PVa>7gKn|U9Hus$4CD(W$7Jlm{EcGUy#IWpOI)+ou zsB&iHUF!S7vdHm4yVI|6`k!F&N8VY8ez51(8Aw((e-@Q{LVhCCwCwclv$8AZcu$@5 ztZF6;z7bYWD%`0Ot0g`zbyTJOF=>L$IK%$cjz#u~86G052)ii{#(3kx1IbbS3P5s< z8Ot)iyzAT?_{!s}?r!1~24o1K$?irU-4q=`%U_P#k8iIs3*Oum$+^W#tTHvbg;{{{ zf!Z?D<8zs!MQ4U&XM`~8cln9CVXjhh?PaXPiv$UoU5_x;saD=G-%gbC9KRAlpVtCa z;uG|V6gqB7>bg>iEzAf-d-Ofmx^Cvrs?8O^w3-vAM)n*GmwRhrOrN2|?Nf?*NFfQz zi!g>p95BJa5>n2wd8V)o+jGnG^qES8u^{qt&K>ABIel9l@UedXmL0UmzQnlnpYf=Q18}mqa@`R8}ptqGNMtmeQYlnC=#Q8#{g8>d%)mG z+4xGy)bV5le?3oW*F9&!AG~I`)mTJ+i{0Qg#E@p5AAv~Tx^w=14nx!F-RqdEC1*Az zxrjv-Fo-5Fv!Rb~FhoLt@zA<}y8>bs2e|~^1J9L2b--42QkgBLOZQ6kRvHg_yH@$V z$rJLHY=qW=N;ZmHwQ#HB`0wcuNAnB*#4 zO~D6TdMphp)Tlr7d1&lvA4#MRMf6ZBM!e$ceiTHGk=y; z^LS+~d$io{Y4)vQZ}PsrQV|Vy5gcz3O0$ndS1)A@Ws@3c~Ela<4(3*u9 zy2iePRJ((DeegK^CEW2^O(o8{)@^cvhMd=)!bL%l2UqpSZ7N2?jOd%ZVe$;TZ>yH< zPNkGNi98U`jq8fnYN?av28)q}IWNXCJ9)1ha8Brg2x%v`)rePpxra3HZOPN(62G(c zU=!^PYkpBmmo{Kc0h2`hT;@U+EZ7b|m$IuZF{S<{2*7W2-|XLR*xGwl1J6zV!ExABu%w>_y+u4*b-ej{hu_@2 zGcxXKuj6(7$+B=`Z2Wtk7o=TlJkR!@Bu5=0Mxs$J7)pzVLCoDH;?JYvAus+L$-(%4 zo#c4X@^NACPb3H9QKP}iGer(T+x3I4;S-idoAjJyyVk@dXcimJ&XSVGLh4VC zX)P$7t$G!veD5yhz1h2o9nM|v#p1ZyA6w#nE~|k;-oyc^z_xY;WrxcWHB|4&;N>FG zWP!tem6|FAU^zS?jHWGW?;PstxFhz993EEaAqxxeh29wNi_h>U_TYOCEtfI=B(^@N>V|YjEg`1E~iMQlU>2Xa( znc-AloO3)@LhR!m#OR!9F=HpiV0>-Yi&!S}lAK$}Od7>_DX@(gW|Wl>v5Jf5zr=bX z09cN6IPFPkDgeu&wDI}&o(JKw4zA_gv|fGRt>I3O9(n4h&KOQR9`*LyMfK=(3u%Z- z89#{*4MhO62J1F+&RD$(17x&-T++}yVM65Q1gjI@|gxGL&NzO7UE7QFc^bwc~jVj9ZOMHt9;lO%^D^k zpE(o3`<}P5V|sk%aAofME_EVeDC)ofW5d9K6=u*66}MWqHS{IRdQ04-eq!H+lVuI9 zaG;LlNZD}Xp@35@um+Y6+omCmr%b|fefc_-X=~R|FMT%oj1CZg?uv~_3+3wCNS5f_ z%lEf5m#GZC5Xt&ro>>homoz7j=C+3aoAqL+7{feEb6?W=)6 zmL}c_EyacL#zKZm9$Vr-?cBB3C-UUIGm3@|ofoG{Jz)%u!i?}n)cNW}G{n}Fe<6Jx zS6;ZaG#Qz0OZ1lF0KC)7^26nq?1Nr2idNOLJ6x!1S?&T(f_`^T3GFM&QS~akZD88G zJN(G34-%?~*qy@bB-)%ba14DuC#pU$t{9wUt~;OV+OT)HYC`f8#@Q|RSThfxwzPq6 zhunZW6Nan8PQ=M-N&6l7+fb}yAd?J7E2lYuM^`jN2Qm8Hq7~Hf8FIlQxR+zR8AQO4i26UKTFj`$FnAfTF z)+1V0#Yuhvqv*~yWL1Zn=ZGYd3B>Akay=fZXU+6tldl?udS3GW`;?q8RHV_Cr|Rt{ z#77-w>Ie(u5o-)pZ<*aN5Xu#|p`83Ge)|LYmo701LXxzw@pf&?_)tm?7=$X2zSZd2 zlT@ess!{j|ab63ka=7>E^e=X~iHsz91C9+m3dEwP))ME$nhKwn)SmOb73fSl2bN+K zNYJWa757F9hsp3;+Qt~xP#R&_Pqm#Z>?z>kZMkS)a`tO3Xzn5~M(x8Qkv9yU58}In z4z#sokIg)04EDbCIqel~X=IY-;t5@H{QmsihDh-0(U3Os%_@ZckUWq~05#;m3-NSr zzUK?;2AX?0*2e}Bob<&eZ#)MMDR}v?l;@4;H;E;sD0}&@kRoE2i4!6duq$ATFos#^ zJ*Z0udv}#q^^RPfZEjs~=xz{dP9Zc2z9Xw2Ez{T-xPZC)i+rSAF?H9Kscrd)C>qS8 zD&UD6ngF?lAf6hI(dx=f7arF%+FEEBqmrzlOSU*KsCcV(HP<5DQk02Ky7dJ;77bYV-5n8tMvyLNE@Yb{fhy@6{6Xm|ga++h4aKyDbb zGh#`BKDumqTEzTUcH_TRFaO4FF#dnVZanzWU}KVU@qfT>FvdqoKd>7Cf3O=NmNR8_ z)EvRXL-|mKy|2lDpVSO7@x5dDI@fv(MFUhyh6o; zPpmh+ywAlx$`kuqx1B`$!gmEk%veorYmFPYPvCl)Mp74@dn()twnM0{L}JPpQ z1gFc$1fY8+0qbw3IGB{87i8M$;R3!&7iy1{G;0J_!ZL@Dd++vGrczPxDXkFqRVJ>5 zhE z!6m#zxtThN#UU%$E`b@t79~nA?&{@6tLk~jCfi4N?}%O+XpoGV#-Iy6)kh4;M+-q4 zU`WBXmWq(V!7jVrU@Q!beu}Nw1C>kZE7VKN{6bM|6MndrPF?h9k}iAHxdx5YL*8+w z^EpjCLF150Pg)UyBe9h`d_B!T~HjD?JMrdsBNIz|F9=orBIJ&QUr-8m0{p zdv0;`=FwEhgB|MISD&r^}nBg_f)qpFq z_%Kg02MV!Xz~6w^UN^z_Z79>%PO=P~DJKbQQRml4nMaore7Md=0xWdE`t#P(LC@qj zqSYCq!vqrx^D`3z;DYm45O0SZjAnpvCMXaP+AkoFeDEcj|G@{sZgEjnP!j>mG7Y|5 z(NUFx&?Kf}xubHfGgb|Oufj%)DJDB7s%fj~WY$hLOr%pSLBVcR|vpQGq6k9HC^E z7&cqG4XCU=^2V$*LyD{tEWl%F=FNr{f2|@QIl1s$joY#!vR4|YfuKpI_DZmCZnPS1 znV&c=J!)glNN$OH>rn3SSBum0qi5CumVW#fPL(Mlir=c8@PShmD*D*hRnxhk!~uo< zZCaqsTUUW4)cw77#VT`ja!EPP<^3(+x7*6Ov;YEzm~?LRqb3X)+0PaGV4tTXcI4SW zPfLd*qz_-Vr8qCb9>Xg8k}?fbEwH)DDZncr6dKH6avCukhusiTE#gBXQleb;knXj} zmM{a;1e$4biqyrWgL}HH@R#T5iV)(Kib{a_n4Fci!tNyM>J=sTSE{RZM6nyq* 
z20;PvUgUK9=`#Ow@?rjwe7aK<8*{Z@l~)PRf+u%8Yk9NAYK0b;PGyU1G%I;5L^2oE zmGky(@!>QdV#*iF^v2l!mn+XaI< z7<*2O&1LTRlVmh-)nRivqUTAuP$;C;!wqr_oYvk5jw76*)s75Q5kVUAqZmesTEuM% zSIDWfCh)DK;Wx}LgtX6KzmBN+_N>PW>NiWZvA^3A9R|)yLIDS15{)==RH?%AnpJ5q zXb7_;HNa#ktRXts|6t`L4eG8NKGkv)I)(R{z8wEN&7!2c$+N?^R&yJ{<}#~R5FT@M zRmkvLT)NjI3j4v!(U@Xe55pS&o_zqM&7axF{vrE>%0FZux&S?kAf(rxV!67=Z`W@v z)WMcEESoMG-zw~Yx5@fc1mdXe`c9Z)Zcb8md+yatiNMa3H$qOoM%u)x--^YlNmt<- z!%sdpJnuH{4JYi2!z|-`K|I4F^3@zpfX=2Q<_s9rr6j7T`;$dI!KF#Y%XUT0)&WXm zp9_qi@6m%5R*>^V57o~#jAkN3^bAfYtCbfuA^zDs5`@x%cJYCuUKeKaoK(J(UjyA-4j0yKw~56$ z-%lxlqhGy0iluqyF!-7xDHP5~%}g>Xo(uvni3uKf7sK=+1NEwdof1;(rAYYX@nbb& z7PNqw4dP*dWPuU|;wcJ2`8lA%BUNI%coA7ha1*)~WBVIe%9}|J4*4kZ!dSw-4ymiB zK7q7cVJP1QZIJas>oN}1qq$#WLm9uoKqt|-f(e$5&dXZ^TBK_(ADh!NfKiI zB?(bYcXa)cgjUM}{9Ap+<0y_@T)!Zg60>L-v3i`ZUFc8w=-Q&VV+W*KVT|-T%N)_I;WqaJY^eypa7JC z{GrU_0jsC-Pv@e3$XEJQ|0Vwq+dsXd{vqxSY=-b-``>P?pX%_~p%3Zi$KwqQg$Mxo z?PB?<{L{AUhkT_E?_cu&V8hcJ*q`EZfW-GBpnkiLeXPYp!TI#o@+XV|poIGY^C&JE zpCbHyT)F{@zo#ROzllF~;{);jQ-4GG`?w?m3@`qv z%%iycP5vpB{ll0_pVYtP|6%*5^uiBuZ(vb^AKO2T%inZ(?9gB1a^nHyX`OL{PnLqX>OikJnd$FU{v~)|AO(;u?$Zkp0)=+A+kw-dgQ5Ne{svh0spj; z{t44U_7mojW0@Z-@__KPO8kjHM)6OM{pCM?0D4-e{Q&X?-U9&r`L;dj8h^uhTBiNL vsPti^e2nqavA;n)EqQ)IP*VN$$WzAx+yV|b@qmB`03Tc^ARuAthhP5-eFc{E literal 13906 zcmeHtWl)_<7A@`?2o^lS-8Hxb53a%8-QC>@65QS0-JPJpf;$9v-pSm_B;n4zGk;#y zysGz6U2uxp>)XA0pWVAx@AqP$V5mSqKu|!nLGMKht99+=0G~7g0}e3E^lY_swQRMh zZ5?fyLV004Y2n(B+Wp~(*@2y+6+V;aJ1pl+RCe#43uXNrL-6v>LS5=k^r%0AR} zw48cF)HoW@5s?A8`H?g)X_-HEe9PtZk`jzdv+t3SXkXFvE>NRzH=mZ|l3whs_R% zu2Qj>ExPDhtK-0s<{y=&%2%wNaf59-h9$Un!Y5eE-8%@%qH+x{m{uo+sG1*l$UZ;Yxs&Q^Yqpbrg&k3^;cNm=6K2NpEz;16 zJNcN)y_nA@5^Ue%{I1Iwd|)5Grxs9S|Ruu5tdZD>sTu{jedBm1+lG^;P@Is@qIGY^_zTBfHnaPz$ zCgd2d?j4T;P$zf?>8;R09oWz$?9ij?%x#;eWC$d2yI0w4o}e2PSG4 zB(9~I5kOEH(KyR))I_W|ejae`Amg%&=}Mo=H=hYLim;*W5sJ!xTh0*|Q8eYZTZXs^lLh;rkovDZ!nCY$<&u11VcUUiIZDC#(v- zOcynj5lN4qbI4>71MIQH)?;+~g}m+zW=XQ%UhC8}i5FC4_eA29MSqizKA!Dw7-0mN z$evoH6xywlD5Ey9hMCk(bQjZaMwYrFhvcLe-p+Z%sStqSfRnakkeaef;^)g2UfO96 zO?eqVb}G0w$+na(W-rQ&n&yGVV$Ny%O=Fl`J_FoGdX&Z?j8PnE56f!ZQJjWT+F?In z3FYmfz4bJwIj%z5w0T_#b3_`qd~hw}=;eJv1kH3k$y@gcGUl z#hT)q8TPTP{Je`9!v1o6+7C|;SF=c+u>D7&WRh_Fkg=B!837+416$t+neI-!9V%gu z$i)?8W64At#Rw`cjHj`0FlV@>^YE@j-ENRaOf(Ls&XxE|&%h;7cYJ2#djbH!n4SnzoCGV0zr;g5X>dGDWpCRRbdM1LNRkP zOaNd0D4#4C+!nzXkJ@wlPH>2^LZpz>6+fF)Z~3^X>_(Pf_GRRh5_Un)g#iZ=J>9Vs z&g`=lGvWqG5d%y^957z|YdXJ@0N*xbTPU(o%}axX&V@ijS_Coe|)aI-alE zv6yLn@TyK)af=DMd?>fX=TgsUE?HznV^^MN(Xffm;w!gSkg6%U3&UZ5XHqLC{@e|) z4g++51j`@G@C${9Nn#PN`j9=t?xTCv2|<_j+VEg^H2a|N5EjHHYZRFU&ORG#XNSZtBB_>4%EOLVfq!5t1}l zkBx zIa)K9MoY9Br5-88)Kab-G%0-3G{#NIr5s#$x3$CBM*ifn z;Rzh*aSj4W{c#LJ{6OjK5Hug)tWi?~B&V$DMAd^>ON!zeaVQt4c@@oX8oi0*X5>cB zlI>gjM|@#>8wk9=ev7f7>21KAalO}Tawl3%ah|ej-L0;6PnA(@An1sx6=5Fx5{Fw+ zsAYC|nA<9^kClFvW3ffMq4?;QW&q{3FNa+_yn|CMl7oIFbK z3$wa~2D7`iLC{ueP-~Z^((0D3Jv<>~_ZIbd-;VuCLGF7;s#JJ2{3m@g`YalVP=euj z30CN@x{#Kl@N}9lm9nFRUz@ImL+1Js`2)j4vUK01Jo3V-;LD>V^A)5a4a!*Pw7 z)Lgri{VFNwcwtz7Wwc|gJf)BXml2MHwc@a zvv5y{@sF&d$pd$Upct9O9~DGWFlWV|7I|dd;m2j(GNzXaYI3tZ_H!~a#6t5nK$bSV zSXL@#LQrRg{mg!<LlYlQ2d{$t>*@PBgK`<=8r54=YhX@0X0TxQ<|Szn$xn zEdN#o-ff?#>Y+bVsaYp=Y-XpX1d^xX@I9ik^^Su~-(XbUdv9O^HmDwPRTLap*0dde zim@YpgYdLQY>iFQJ=J;$*^*Ns@|I2BAU32X6zwNBl8S~NyF*K6G13~td4ztdGl_Fj z3wq;)MRw57TV)6-@Iw^PpCQv{$)h85`A9vtqA?EjVh5c}J!1?XAk*WqN1v8X?&70X zDB2%DYM)jdt&NqyNb)tkg-1S6^1gwdKzT3TNa5J~SdFS&x|Wx%t2E_c;5sd3_LiC` zwm^oZ9U{Q5q$a;fWN~iks>3>U8DGg)+-3~ytZt^HUTLYI)h8e#<$>y0|otkP77X%U!gSr)hoMtkj>wy{=S;DM8#;f6J)ZHw|2|S)fVWc2w zFB@1m)jprgnfx6$C(k{d#IX74(nIsQ0-)~!+aTOF-tD2%oMld 
zy@}v<{tLsr;g$u>qXxP?Urb6mx1d|iU9vrDTmwBC(-_>LWXqZ9X3>b}&|W(1-)Rxt;=OKevv_}9jq}nRsu zY?fjP|I+wDW;+HOEnW`V3r-SWmAEcm$nbY-q zaUH-y9&)B2Q!iCjqm29;TkVmS${!6Ys+lRefIr-AxUlnZT~W6^I78j?T<>jr93U}v z6@Ngw{YbBv_xk<4DGE!VetFxQFNm2DIpp3(wG4J?$zJDGOXA#WN|?{;kzb3Me{A5^ z))v&3_7*hc@s(_uhY4{j&Fdh~yOmr9<-D=f<=y@U}fHl)F;kEQ5I)>_2b2 zlWQ7Hl;}sD^RzTj>g@>qb>wRK&GEi*1#1y*!*l^j7C9>guFK@iaS1Pi^qY%X*}C-V zRAP3D58jiX~b!E9?u{YXxJe zC8!V_XVl#Vnn*goE$ySfSiz9zJe*at)Tmp4eb! z>>)k;(E98Y@19I`^94Xq9DtzjHR6v`ypDyLrKz4R;6VS+8Zj<%#2AzS)o+Rgx>O91 zR9WZaI0N$=WWj`cTx2o$a0xAvO$IW_Y$tM3&z!iLTMq2ektHKZDS-=F5B!dV-*#lz z9HOy>E*;b;m`9#9ouvbH@2kI80v;&!^HP@-4NJk*E!!x5+1ER+(4KI)2q_JJByKqz zWcrS?6aIQl1f|3$uy@-g^Z45k%AQT=D%pmwMW68Ypk>9&3@)2`t>H9@JZMS+X707K zIa`MLxyqaPMGe@QO;O9WLG9OkO(M+Z7~^uJ$qyO}wBN2h@yx&kO08=@LWRAU<@o(8 z5O$}mGyo`>U;MSMLGPx}VZP3cw1h}{3-2|y#2P;Jrilkp&LTQHA*2}}6zTV$Y1t6{{jDBG*#7`Zls$JhZn>Mz2*uM~3TsA#o zVfl2p7f*p9ExasCVJKHSt%KNPK|#aL)#{!Lb|n)>GgFUG%pz#BN46K= z*5mWZR#UPy4&SvsC)tpdFK-wnt%CZh&$A_Awwc(U4&Yz| zv*~szoV*!*@5j)yKvNX=EyoFfvTM~uuAFIrl8g@WB2jPfr&Ieo9>HxUPH=fkT45K- zfdqk}48US`D0atDC{3!o69N`!R&QVA#Ov^sQcGp&q|<#VW53Ah7Nx6qX4U^6xVd009~vGK9SNuMR_@4^tg_J#Srv4 zb~s9vXws?n$S6eC9nNAM1nVf4W|eC1Vs#Qbf^21JE z=NA(h2^4qts8n|JNrP0Da2U3#BGnFtNO9YhG3pw(IVolpU#k?VfzCiQbOc z5i`4y3B;VZV_%3TE>A&YCc>b8jeC?cU{9%-%-yb+mr_2nId z6)+OS^)M+6rar>RM&kZwW%MxdFY9x3Oug}Fc%MtJ4+^-;W_%)`DR96WIU3vEEhRbl zx_YPb>jj}32MR_ubK@z;l4V6VAZ?G}Q9?$+)pNynsD4$g(`^eOh});Iss3ti`nZ z0IjlfRNZZNN)td5T||8q&KW9QqZVHm_zpxrq&yq8p2=yPlK~0c9wK!ixIEX5f}%v9 z6QNxS;tVLM*YLxY$ESxM4?ZJfBQNP`B-w)r+%AYsa#{9AV&jxvA~^+B+D27( zj!iR9{9)_?Y1nsJW5M=9Bbc120@5dHbWAbjyR3ocuH#}$6PkY&j5}@LG(VYlR65T* z(xqU>a)s!y_th#ZY25l`n5%Pt%^liusfmwZtELb*jF)vqX#65fh0970{gG%SN@uZBe|FiJg?7wac1*C~q%P68bW^<%OiN|7Lu9p3b>1*enweQP-&57= zrL)U8r}2I-duoSLqz1lIluNv#T{qeC2}seI)5~1uwo)8ZKIvOA!D)X(@PhC*FKll% zQ#5f$z4Z(hESx9b6jQ$3(Hy;5k7(C>Sbb`li!7n#|stCeOunnPc^(k8kD3GR<*_JBB3N4p0a`I z-Y0ytB63u0D1Uuku4|ZYlaaOwgqC6*6`Q-L`K5JC*0t&rO!xm~QcpC?}gv}h%ZzlmZ zTD6>ub^u-gjdgp$-7vkO8N$#E7i)Z$usjXhLmPKN7pDM@pfRv`exovH(nQ!Oe6j=b zYJ3*d3=eBOEi>TJ-RFQCEk%k@Zfdg(`sl8>Fhj};GJ_V8;Ju$A(RoiOCI$W3f^Hnj zEdBMJZ)cmyy~HZkBf_EvXW^aj%Gj%Ow~%#cY8gm_AT_xMUt4)MW{I58alZDYH^?6y zIra5Q`;`;fmlDp7S`w3_udmNR#`#v=F;_SC z1vP0J2%kX5DQQi-5D94g<5kC5Sgop|u@9=Km1P7+_BXswM^-X+x=iOK@d9_yk!sDa zMGdAW9GsE>VeFS)6EGmaPS^MDLnAMasLphfbQuInu^(tZry2Y1v ze8yUQr&GsCY6Ge5z<1Dg^af4U^$X=F@!Q>3m8NLjG~bAvd_RfrL8y`hFbQJ*fC%v;%^D^@ z_3g6-`%EM&9vf{DOsdE&Al~SS*O_75=_vKUp%5BlXJ4lGS6O)_KM&BD&q?KP<+Y+SfN+l zKI0DjBhxk%<>xPy?xA-*n zu3jnffSWNs+~M2#yM3_dPGk41w&M2%&eX7pp4rX~4q2Hju+K9s(eaa{efSh;fF z39@bC&93yk38|`c$>7r!4-~N0$<+#7t{CW4+ghu07z^YWQ)w%8{B%XS}_={!@28k#M$u3$Xzaa_2G7+r08_)t+-)?A4Gq%e&^rup~Qb^L%s#ZL` z)$cysj(pl48{_D&E#IcU&qHm0CPU^;dji8!dhM-fMQZ3wImZgwx; z%76k=2kzT1w(cKU$)W|eTY3KNQ32tA4+{$tI!E>0mc)u(pEGOq;rLl&ZUxM!NSPlK zu5aksUc1g9@-!}gxlvm-QiV!wCS^{%y%_OJ((S}|?_emXDL%>uO0Er*%|tuCijS$V zY{VhI?_Hx2L!wehBfB=Pp!XAZe@aI0Htc=*mB}%-`(+h>SN&)wIQ&LpwggY+gn$V# zKbneMm}$6#B1OUN)_Ay-b8MEWKd;@m^-h_cI>*)ICceFJ`eRv>j!5L`ER-=VVblFH zYS}Bl!ioZD)dpxruK-_4EpshXCmSOh4ZxkSg|)55FJj|T^oVgcKB`~Sj?cT9E?ViV z+@?vzolP$lbui3 zUfJ{JRXd*9_M5KMR#Y;R(87g9z>n}LpnD-C>77ooe`fAUmff0f(%s0%9&_@`(M^Wc z%a4tFVQ4D9HOOPls9CP;q7Irsv>FR0i-Ei#hh@bBwNtr16mM8l8^0cR3ChS}-alsf zQDMWMLRSt1or4zNtfPaDMAYQ{J^E#Df-emp~G$=%hHJ+BzjR%TtlR15n4MKEk|>ZO!EvYI7*~!sCU@i%iKfx zTb`-bJD&ItegNue0n2~}`2EKXtF4y7Z~Ac9um~^+Eu4TfX$ZobamEpCKh@}5nD<^V za_JyAJLxajw^`b-XUrY;!~8=A4)QZ5vHAlCNJY}$yQ}WGsKLlWC?ppwB8gxp?q{9 z@QO_tl+!BZa}M%4)Jk|?=5b0pHuSAPt?*AH`ce<2`{iTjUgHLaP;mU8Xp!n7;Ur>i 
zUtEk+%O>@Yj}q1AIry%r?tVT??o?UiBa*X(5HHTYn1Sk$IFT0f307@oR_nindbmB8z}p!uskmcz&W%DSJY5RRL{?#Lung%o<2QzhR+hP&fuMZ7mJAR4vimP>^u)kt@u$(S4K+ld z2ub&G3P0^yB&r`_2?liv>yi7Z)A#b4Xmn#!;I$KRJGhjcZ>d*plKh_C_=WfPHUab% z0jT^t5t|__Qo_6oAIWd(lR$CNp#6zdW05ry<7yDo3ka~7Lg|gO^CXXfP11snBt(vkohFF7og zVkOmD6sjBxGW(A~hbgafB{0mRtP?;$z{n6H+{cQpcVyD^_QAri#gT>EJhT*3>5joiACA}^v((giG<)k!Q@;#$vd!1iFO>OKXnD3U zP07R!TpY#E2>c_(*Q1rw04k+m&&-j5^`wlCct5#m05k~_lW#Wshs@6i$o$_U3_uG; ziAi6uhirc)sbXv~RRJ`h+hk5UhVP^E+1BpV)Yz1{gO=EdF^;q=Z{PyQ zWi*{SBRG0MbY1htOPA4pFce=PQO{Q`ljg)Le$ZZDK=`tAwD3BXPn&&%#;0t$V+@U3o+=JV~V$~*qSFTh`IhGTn zAHuHxkIa7x$oyL|hze^1CB2c3c-t||mW_BY_q_+-Ge6q)&W8#NOgBOg*HH@X@vh*L zEgaD`n1KvUB?GM%kW=wV0eoqS%;^|nSgP8bwP3uLc(;Wg@5j#yP5iBPl3LB>W|#Mx z?T)e~cn`cZ@7s>Ni1@@cUgvjs9^C%Y*zuiO0i4$>|6S&%f0p@${wFhkUUR1m{iq*( z!ylPnMh~A2i*~N~Z5iFx&ZWn_|@}UxvPk-T^1!uxuExic#c$ zg;A#V3cFmqFCNG;-8whHPqG@H;5U<4!EwnU~35()tYl9rjB(x3t$gsU(Lphe} z6!{iM4OnXiG;cOKeAVfz5q-lA$-xq1rI{p2=$cY14T1T@*I&6HK+x-bajr+=7JPU$ z&M_jq%r_0ZxWgR{=VRyykQVNu2)(wQt`%v8j;-FH^WV&^#GqZi=?6cvxuw8PNbpca zT!o$a1oSAUHqnf1G8Pq4vjJ-|t=P`kKr64=fV{r0`dAJwrWIMk=2VQS-2kXefk99~ zzZdxbzC#@FkAFO#fQ|C^e(~p)&%3+-umb|B_HqaO`Qw)O*Iw`E2+uDQej+%7|C^4< z=jOlO_kZ8he>|SR_yE!QFIyR(!#rS+^{2Lj=UzO&o&O1=fcz8Ym$;;Vj_}uUDGfL~ zJn!WFlliYS{vMZC0Q0|!%Rj+9zlZuc4@N-W&2KP2N&OSbU&kdVplA0_arr0v=XW?i zy>S1R{olO)uj7&o@5k$($K{`V_?6J#9+b9k;5{WQlU z_!&^o~ZUeqdC4b^eC&lUTav5YG=~KOvw=elqf$*xzXRzQaF1zx;$DBK-;T3$cv9I`SRi zd71tb!ISLoi2ZGizXSa>v7i)xLF}I}{+ih4-!Og>`zMI!HP6rafKvTr7;0GHD2ne70`=|c{YaLUo diff --git a/tests/containers/events.py b/tests/containers/events.py index 6ac44ecb16..f06987bb6d 100644 --- a/tests/containers/events.py +++ b/tests/containers/events.py @@ -238,12 +238,10 @@ class EventTagTest(shared_test_lib.BaseTestCase): def testCopyToDict(self): """Tests the CopyToDict function.""" event_tag = events.EventTag( - comment=u'This is a test event tag.', - event_uuid=u'11fca043ea224a688137deaa8d162807') + comment=u'This is a test event tag.') expected_dict = { u'comment': u'This is a test event tag.', - u'event_uuid': u'11fca043ea224a688137deaa8d162807', u'labels': []} test_dict = event_tag.CopyToDict() diff --git a/tests/frontend/preg.py b/tests/frontend/preg.py index 90aa3fbe36..5ba14e6b88 100644 --- a/tests/frontend/preg.py +++ b/tests/frontend/preg.py @@ -214,9 +214,9 @@ def testParseRegistry(self): event_objects = data.get(usb_plugin, []) self.assertEqual(len(event_objects), 5) - event_object = event_objects[2] + event = event_objects[2] - self.assertEqual(event_object.data_type, u'windows:registry:key_value') + self.assertEqual(event.data_type, u'windows:registry:key_value') parse_key_data = front_end.ParseRegistryKey( usb_key, registry_helper, use_plugins=u'windows_usbstor_devices') @@ -224,10 +224,15 @@ def testParseRegistry(self): self.assertEqual(len(parse_key_data.keys()), 1) parsed_key_value = parse_key_data.values()[0] - for index, event_object in enumerate(event_objects): + for index, event in enumerate(event_objects): parsed_key_event = parsed_key_value[index] - self.assertEqual( - event_object.EqualityString(), parsed_key_event.EqualityString()) + event_values = event.CopyToDict() + del event_values[u'uuid'] + + parsed_key_event_values = parsed_key_event.CopyToDict() + del parsed_key_event_values[u'uuid'] + + self.assertEqual(event_values, parsed_key_event_values) if __name__ == '__main__': diff --git a/tests/multi_processing/psort.py b/tests/multi_processing/psort.py index aabb7464f3..a01fcf014d 100644 --- a/tests/multi_processing/psort.py +++ b/tests/multi_processing/psort.py @@ 
-376,13 +376,14 @@ def testExportEvents(self): self.assertEqual(len(lines), 24) expected_line = ( - u'2016-10-16T15:13:43+00:00,' - u'mtime,' - u'FILE,' - u'OS mtime,' - u'OS:/tmp/test/test_data/syslog Type: file,' - u'filestat,' - u'OS:/tmp/test/test_data/syslog,-') + u'2014-11-18T01:15:43+00:00,' + u'Content Modification Time,' + u'LOG,' + u'Log File,' + u'[---] last message repeated 5 times ---,' + u'syslog,' + u'OS:/tmp/test/test_data/syslog,' + u'repeated') self.assertEqual(lines[14], expected_line) diff --git a/tests/serializer/json_serializer.py b/tests/serializer/json_serializer.py index f47ddb9bb6..562c0fa9c0 100644 --- a/tests/serializer/json_serializer.py +++ b/tests/serializer/json_serializer.py @@ -138,13 +138,12 @@ def testReadAndWriteSerializedEventObject(self): parent=volume_path_spec) expected_event_object = events.EventObject() + expected_event_object.uuid = u'c7a85a1cdf1740f5a3bf6d05f89fe474' expected_event_object.data_type = u'test:event2' expected_event_object.pathspec = path_spec expected_event_object.timestamp = 1234124 expected_event_object.timestamp_desc = u'Written' - # Prevent the event object for generating its own UUID. - expected_event_object.uuid = u'5a78777006de4ddb8d7bbe12ab92ccf8' expected_event_object.binary_string = b'\xc0\x90\x90binary' expected_event_object.empty_string = u'' @@ -190,8 +189,8 @@ def testReadAndWriteSerializedEventObject(self): u'string': u'Normal string', u'timestamp_desc': u'Written', u'timestamp': 1234124, - u'uuid': u'5a78777006de4ddb8d7bbe12ab92ccf8', u'unicode_string': u'And I am a unicorn.', + u'uuid': u'c7a85a1cdf1740f5a3bf6d05f89fe474', u'zero_integer': 0 } @@ -225,7 +224,6 @@ def testReadAndWriteSerializedEventSource(self): expected_event_source_dict = { u'path_spec': test_path_spec.comparable, - u'storage_session': 0, } event_source_dict = event_source.CopyToDict() @@ -240,8 +238,6 @@ def testReadAndWriteSerializedEventSource(self): def testReadAndWriteSerializedEventTag(self): """Test ReadSerialized and WriteSerialized of EventTag.""" expected_event_tag = events.EventTag(comment=u'My first comment.') - expected_event_tag.store_number = 234 - expected_event_tag.store_index = 18 expected_event_tag.AddLabels([u'Malware', u'Common']) json_string = ( @@ -260,8 +256,6 @@ def testReadAndWriteSerializedEventTag(self): expected_event_tag_dict = { u'comment': u'My first comment.', u'labels': [u'Malware', u'Common'], - u'store_index': 18, - u'store_number': 234, } event_tag_dict = event_tag.CopyToDict() diff --git a/tests/storage/fake_storage.py b/tests/storage/fake_storage.py index 5634dde123..7ea600e7b5 100644 --- a/tests/storage/fake_storage.py +++ b/tests/storage/fake_storage.py @@ -87,17 +87,17 @@ def testAddEventSource(self): def testAddEventTag(self): """Tests the AddEventTag function.""" session = sessions.Session() - test_events = self._CreateTestEvents() - event_tags = self._CreateTestEventTags() storage_writer = fake_storage.FakeStorageWriter(session) storage_writer.Open() + test_events = self._CreateTestEvents() for event in test_events: storage_writer.AddEvent(event) event_tag = None - for event_tag in event_tags: + test_event_tags = self._CreateTestEventTags(test_events) + for event_tag in test_event_tags: storage_writer.AddEventTag(event_tag) storage_writer.Close() diff --git a/tests/storage/gzip_file.py b/tests/storage/gzip_file.py index b4941f19be..26c9a1b29c 100644 --- a/tests/storage/gzip_file.py +++ b/tests/storage/gzip_file.py @@ -80,7 +80,6 @@ def testAddEventSource(self): def testAddEventTag(self): """Tests the 
AddEventTag function.""" test_events = self._CreateTestEvents() - event_tags = self._CreateTestEventTags() with shared_test_lib.TempDirectory() as temp_directory: temp_file = os.path.join(temp_directory, u'storage.plaso') @@ -90,7 +89,8 @@ def testAddEventTag(self): for event in test_events: storage_file.AddEvent(event) - for event_tag in event_tags: + test_event_tags = self._CreateTestEventTags(test_events) + for event_tag in test_event_tags: storage_file.AddEventTag(event_tag) storage_file.Close() @@ -185,7 +185,6 @@ def testGetEventSources(self): def testGetEventTags(self): """Tests the GetEventTags function.""" test_events = self._CreateTestEvents() - event_tags = self._CreateTestEventTags() with shared_test_lib.TempDirectory() as temp_directory: temp_file = os.path.join(temp_directory, u'storage.plaso') @@ -195,7 +194,8 @@ def testGetEventTags(self): for event in test_events: storage_file.AddEvent(event) - for event_tag in event_tags: + test_event_tags = self._CreateTestEventTags(test_events) + for event_tag in test_event_tags: storage_file.AddEventTag(event_tag) storage_file.Close() diff --git a/tests/storage/test_lib.py b/tests/storage/test_lib.py index fa242443c4..9277791994 100644 --- a/tests/storage/test_lib.py +++ b/tests/storage/test_lib.py @@ -15,6 +15,8 @@ class StorageTestCase(shared_test_lib.BaseTestCase): """The unit test case for a storage object.""" + # pylint: disable=protected-access + def _CreateTestEvents(self): """Creates events for testing. @@ -79,36 +81,41 @@ def _CreateTestEvents(self): return test_events - def _CreateTestEventTags(self): + def _CreateTestEventTags(self, test_events): """Creates the event tags for testing. + Args: + list[EventObject]: test_events. + Returns: - A list of event tags (instances of EventTag). + list[EventTag] event tags. 
""" event_tags = [] - event_tag = events.EventTag() - event_tag.store_index = 0 - event_tag.store_number = 1 - event_tag.comment = u'My comment' + event_identifier = test_events[0].GetIdentifier() + + event_tag = events.EventTag(comment=u'My comment') + event_tag.SetEventIdentifier(event_identifier) event_tags.append(event_tag) + event_identifier = test_events[1].GetIdentifier() + event_tag = events.EventTag() - event_tag.store_index = 1 - event_tag.store_number = 1 + event_tag.SetEventIdentifier(event_identifier) event_tag.AddLabel(u'Malware') event_tags.append(event_tag) - event_tag = events.EventTag() - event_tag.store_number = 1 - event_tag.store_index = 2 - event_tag.comment = u'This is interesting' + event_identifier = test_events[2].GetIdentifier() + + event_tag = events.EventTag(comment=u'This is interesting') + event_tag.SetEventIdentifier(event_identifier) event_tag.AddLabels([u'Malware', u'Benign']) event_tags.append(event_tag) + event_identifier = test_events[1].GetIdentifier() + event_tag = events.EventTag() - event_tag.store_index = 1 - event_tag.store_number = 1 + event_tag.SetEventIdentifier(event_identifier) event_tag.AddLabel(u'Interesting') event_tags.append(event_tag) diff --git a/tests/storage/zip_file.py b/tests/storage/zip_file.py index 0ccf5d111a..958d2556e4 100644 --- a/tests/storage/zip_file.py +++ b/tests/storage/zip_file.py @@ -16,6 +16,7 @@ from plaso.lib import definitions from plaso.lib import timelib from plaso.formatters import winreg # pylint: disable=unused-import +from plaso.storage import identifiers from plaso.storage import time_range from plaso.storage import zip_file @@ -43,12 +44,12 @@ def testReadAndSeek(self): entry_data1 = data_stream.ReadEntry() self.assertEqual(data_stream.entry_index, 1) - self.assertEqual(data_stream._stream_offset, 671) + self.assertEqual(data_stream._stream_offset, 654) self.assertIsNotNone(entry_data1) entry_data2 = data_stream.ReadEntry() self.assertEqual(data_stream.entry_index, 2) - self.assertEqual(data_stream._stream_offset, 1340) + self.assertEqual(data_stream._stream_offset, 1306) self.assertIsNotNone(entry_data2) # Read more entries than in the stream. 
@@ -56,19 +57,19 @@ def testReadAndSeek(self): entry_data = data_stream.ReadEntry() self.assertEqual(data_stream.entry_index, 19) - self.assertEqual(data_stream._stream_offset, 12745) + self.assertEqual(data_stream._stream_offset, 12473) self.assertIsNone(entry_data) - data_stream.SeekEntryAtOffset(1, 671) + data_stream.SeekEntryAtOffset(1, 654) entry_data = data_stream.ReadEntry() self.assertEqual(data_stream.entry_index, 2) - self.assertEqual(data_stream._stream_offset, 1340) + self.assertEqual(data_stream._stream_offset, 1306) self.assertEqual(entry_data, entry_data2) data_stream.SeekEntryAtOffset(0, 0) entry_data = data_stream.ReadEntry() self.assertEqual(data_stream.entry_index, 1) - self.assertEqual(data_stream._stream_offset, 671) + self.assertEqual(data_stream._stream_offset, 654) self.assertEqual(entry_data, entry_data1) with self.assertRaises(IOError): @@ -155,13 +156,13 @@ def testGetOffset(self): offset_table.Read() self.assertEqual(offset_table.GetOffset(0), 0) - self.assertEqual(offset_table.GetOffset(1), 671) + self.assertEqual(offset_table.GetOffset(1), 654) with self.assertRaises(IndexError): offset_table.GetOffset(99) - self.assertEqual(offset_table.GetOffset(-1), 12067) - self.assertEqual(offset_table.GetOffset(-2), 11399) + self.assertEqual(offset_table.GetOffset(-1), 11841) + self.assertEqual(offset_table.GetOffset(-2), 11209) with self.assertRaises(IndexError): offset_table.GetOffset(-99) @@ -220,14 +221,14 @@ def testGetTimestamp(self): zip_file_object, stream_name) timestamp_table.Read() - self.assertEqual(timestamp_table.GetTimestamp(0), 1453449153000000) - self.assertEqual(timestamp_table.GetTimestamp(1), 1453449153000000) + self.assertEqual(timestamp_table.GetTimestamp(0), 1327218753000000) + self.assertEqual(timestamp_table.GetTimestamp(1), 1327218753000000) with self.assertRaises(IndexError): timestamp_table.GetTimestamp(99) - self.assertEqual(timestamp_table.GetTimestamp(-1), 1483206872000000) - self.assertEqual(timestamp_table.GetTimestamp(-2), 1482083672000000) + self.assertEqual(timestamp_table.GetTimestamp(-1), 1485089424000000) + self.assertEqual(timestamp_table.GetTimestamp(-2), 1485089422000000) with self.assertRaises(IndexError): timestamp_table.GetTimestamp(-99) @@ -313,41 +314,21 @@ def _CreateTestStorageFileWithTags(self, path): for event in test_events: storage_file.AddEvent(event) - test_event_tags = self._CreateTestEventTags() + test_event_tags = self._CreateTestEventTags(test_events) storage_file.AddEventTags(test_event_tags[:-1]) storage_file.AddEventTags(test_event_tags[-1:]) storage_file.Close() - def _GetTaggedEvent(self, storage_file, event_tag): - """Retrieves the event object for a specific event tag. - - Args: - storage_file: a storage file (instance of StorageFile). - event_tag: an event tag object (instance of EventTag). - - Returns: - An event object (instance of EventObject) or None if no corresponding - event was found. 
- """ - event = storage_file._GetEvent( - event_tag.store_number, entry_index=event_tag.store_index) - if not event: - return - - event.tag = event_tag - return event - - @shared_test_lib.skipUnlessHasTestFile([u'psort_test.json.plaso']) - def testBuildTagIndex(self): - """Tests the _BuildTagIndex function.""" + def testBuildEventTagIndex(self): + """Tests the _BuildEventTagIndex function.""" test_file = self._GetTestFilePath([u'psort_test.json.plaso']) storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) self.assertIsNone(storage_file._event_tag_index) - storage_file._BuildTagIndex() + storage_file._BuildEventTagIndex() self.assertIsNotNone(storage_file._event_tag_index) @@ -378,10 +359,6 @@ def testGetEvent(self): storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) - # TODO: make this raise IOError. - event = storage_file._GetEvent(0) - self.assertIsNone(event) - # There are 19 events in the first event data stream. for _ in range(0, 19): event = storage_file._GetEvent(1) @@ -399,6 +376,9 @@ def testGetEvent(self): event = storage_file._GetEvent(1, entry_index=19) self.assertIsNone(event) + with self.assertRaises(ValueError): + storage_file._GetEvent(0) + with self.assertRaises(ValueError): storage_file._GetEvent(1, entry_index=-2) @@ -414,12 +394,6 @@ def testGetEventObjectSerializedData(self): storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) - # TODO: make this raise IOError. - data_tuple = storage_file._GetEventSerializedData(0) - self.assertIsNotNone(data_tuple) - self.assertIsNone(data_tuple[0]) - self.assertIsNone(data_tuple[1]) - # There are 19 events in the first event data stream. for entry_index in range(0, 19): data_tuple = storage_file._GetEventSerializedData(1) @@ -448,6 +422,9 @@ def testGetEventObjectSerializedData(self): # TODO: make the behavior of this method more consistent. self.assertIsNone(data_tuple[1]) + with self.assertRaises(ValueError): + storage_file._GetEventSerializedData(0) + with self.assertRaises(ValueError): storage_file._GetEventSerializedData(1, entry_index=-2) @@ -466,10 +443,6 @@ def testGetEventSource(self): storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) - # TODO: make this raise IOError. - event_source = storage_file._GetEventSource(0) - self.assertIsNone(event_source) - # There is 1 event source in the first event data stream. for _ in range(0, 1): event_source = storage_file._GetEventSource(1) @@ -484,11 +457,14 @@ def testGetEventSource(self): event_source = storage_file._GetEventSource(1, entry_index=1) self.assertIsNone(event_source) + with self.assertRaises(ValueError): + storage_file._GetEventSource(0) + with self.assertRaises(ValueError): storage_file._GetEventSource(1, entry_index=-2) - event_source = storage_file._GetEventSource(3) - self.assertIsNone(event_source) + with self.assertRaises(ValueError): + event_source = storage_file._GetEventSource(3) storage_file.Close() @@ -499,12 +475,6 @@ def testGetEventSourceSerializedData(self): storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) - # TODO: make this raise IOError. - data_tuple = storage_file._GetEventSourceSerializedData(0) - self.assertIsNotNone(data_tuple) - self.assertIsNone(data_tuple[0]) - self.assertIsNone(data_tuple[1]) - # There is 1 event source in the first event data stream. 
for entry_index in range(0, 1): data_tuple = storage_file._GetEventSourceSerializedData(1) @@ -528,14 +498,14 @@ def testGetEventSourceSerializedData(self): # TODO: make the behavior of this method more consistent. self.assertIsNone(data_tuple[1]) + with self.assertRaises(ValueError): + storage_file._GetEventSourceSerializedData(0) + with self.assertRaises(ValueError): storage_file._GetEventSourceSerializedData(1, entry_index=-2) - data_tuple = storage_file._GetEventSourceSerializedData(3) - self.assertIsNotNone(data_tuple) - self.assertIsNone(data_tuple[0]) - # TODO: make the behavior of this method more consistent. - self.assertIsNone(data_tuple[1]) + with self.assertRaises(ValueError): + storage_file._GetEventSourceSerializedData(3) storage_file.Close() @@ -728,7 +698,7 @@ def testGetSortedEvent(self): storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) - expected_timestamp = 1453449153000000 + expected_timestamp = 1327218753000000 event = storage_file._GetSortedEvent() self.assertIsNotNone(event) @@ -736,7 +706,7 @@ def testGetSortedEvent(self): # Test lower bound time range filter. test_time_range = time_range.TimeRange( - timelib.Timestamp.CopyFromString(u'2016-04-30 06:41:49'), + timelib.Timestamp.CopyFromString(u'2012-04-30 06:41:49'), timelib.Timestamp.CopyFromString(u'2030-12-31 23:59:59')) storage_file.Close() @@ -744,7 +714,7 @@ def testGetSortedEvent(self): storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) - expected_timestamp = 1476630823000000 + expected_timestamp = 1355853272000000 event = storage_file._GetSortedEvent(time_range=test_time_range) self.assertEqual(event.timestamp, expected_timestamp) @@ -752,14 +722,14 @@ def testGetSortedEvent(self): # Test upper bound time range filter. test_time_range = time_range.TimeRange( timelib.Timestamp.CopyFromString(u'2000-01-01 00:00:00'), - timelib.Timestamp.CopyFromString(u'2016-04-30 06:41:49')) + timelib.Timestamp.CopyFromString(u'2012-04-30 06:41:49')) storage_file.Close() storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=test_file) - expected_timestamp = 1453449153000000 + expected_timestamp = 1327218753000000 event = storage_file._GetSortedEvent(time_range=test_time_range) self.assertEqual(event.timestamp, expected_timestamp) @@ -847,8 +817,8 @@ def testReadAttributeContainersFromStream(self): storage_file.Close() - def testReadEventTagByIdentifier(self): - """Tests the _ReadEventTagByIdentifier function.""" + def testGetEventTagByIdentifier(self): + """Tests the _GetEventTagByIdentifier function.""" with shared_test_lib.TempDirectory() as temp_directory: temp_file = os.path.join(temp_directory, u'storage.plaso') self._CreateTestStorageFileWithTags(temp_file) @@ -856,10 +826,14 @@ def testReadEventTagByIdentifier(self): storage_file = zip_file.ZIPStorageFile() storage_file.Open(path=temp_file) - event_tag = storage_file._ReadEventTagByIdentifier(0, 0, u'') - self.assertIsNone(event_tag) + event_identifier = identifiers.SerializedStreamIdentifier(1, 0) + event_tag = storage_file._GetEventTagByIdentifier(event_identifier) + self.assertIsNotNone(event_tag) + self.assertEqual(event_tag.comment, u'My comment') - # TODO: add positive test. 
+ event_identifier = identifiers.SerializedStreamIdentifier(99, 0) + event_tag = storage_file._GetEventTagByIdentifier(event_identifier) + self.assertIsNone(event_tag) storage_file.Close() @@ -957,7 +931,6 @@ def testWriteSerializedEventSources(self): def testWriteSerializedEventTags(self): """Tests the _WriteSerializedEventTags function.""" test_events = self._CreateTestEvents() - event_tags = self._CreateTestEventTags() with shared_test_lib.TempDirectory() as temp_directory: temp_file = os.path.join(temp_directory, u'storage.plaso') @@ -967,7 +940,8 @@ def testWriteSerializedEventTags(self): for event in test_events: storage_file.AddEvent(event) - for event_tag in event_tags: + test_event_tags = self._CreateTestEventTags(test_events) + for event_tag in test_event_tags: storage_file.AddEventTag(event_tag) storage_file._WriteSerializedEvents() @@ -1075,7 +1049,6 @@ def testAddEventSource(self): def testAddEventTag(self): """Tests the AddEventTag function.""" test_events = self._CreateTestEvents() - event_tags = self._CreateTestEventTags() with shared_test_lib.TempDirectory() as temp_directory: temp_file = os.path.join(temp_directory, u'storage.plaso') @@ -1085,7 +1058,8 @@ def testAddEventTag(self): for event in test_events: storage_file.AddEvent(event) - for event_tag in event_tags: + test_event_tags = self._CreateTestEventTags(test_events) + for event_tag in test_event_tags: storage_file.AddEventTag(event_tag) storage_file.Close() @@ -1095,7 +1069,6 @@ def testAddEventTag(self): def testAddEventTags(self): """Tests the AddEventTags function.""" test_events = self._CreateTestEvents() - event_tags = self._CreateTestEventTags() with shared_test_lib.TempDirectory() as temp_directory: temp_file = os.path.join(temp_directory, u'storage.plaso') @@ -1105,8 +1078,9 @@ def testAddEventTags(self): for event in test_events: storage_file.AddEvent(event) - storage_file.AddEventTags(event_tags[:-1]) - storage_file.AddEventTags(event_tags[-1:]) + test_event_tags = self._CreateTestEventTags(test_events) + storage_file.AddEventTags(test_event_tags[:-1]) + storage_file.AddEventTags(test_event_tags[-1:]) storage_file.Close() @@ -1230,7 +1204,12 @@ def testGetEventTags(self): storage_file.Open(path=temp_file) for event_tag in storage_file.GetEventTags(): - event = self._GetTaggedEvent(storage_file, event_tag) + event_identifier = event_tag.GetEventIdentifier() + event = storage_file._GetEvent( + event_identifier.stream_number, + entry_index=event_identifier.entry_index) + + event.tag = event_tag tagged_events.append(event) storage_file.Close() @@ -1466,19 +1445,19 @@ def testWriteTaskStartAndCompletion(self): class ZIPStorageFileReaderTest(test_lib.StorageTestCase): """Tests for the ZIP-based storage file reader.""" - _EXPECTED_TIMESTAMPS_BEFORE_20060430 = [ - 1453449153000000, 1453449153000000, 1453449153000000, 1453449153000000, - 1453449181000000, 1453449181000000, 1453449241000000, 1453449241000000, - 1453449241000000, 1453449241000000, 1453449272000000, 1453449272000000, - 1454771790000000, 1454771790000000, 1456708543000000, 1456708543000000, - 1458774078000000, 1458774078000000, 1458774078000000, 1458774078000000] + _EXPECTED_TIMESTAMPS_BEFORE_20120430 = [ + 1327218753000000, 1327218753000000, 1327218753000000, 1327218753000000, + 1327218781000000, 1327218781000000, 1327218841000000, 1327218841000000, + 1327218841000000, 1327218841000000, 1327218872000000, 1327218872000000, + 1330478143000000, 1330478143000000] - _EXPECTED_TIMESTAMPS_AFTER_20060430 = [ - 1476630823000000, 1476630823000000, 
1476630823000000, 1476630823000000, - 1476630824000000, 1476630824000000, 1479431720000000, 1479431720000000, - 1479431743000000, 1479431743000000, 1479457820000000, 1479457820000000, - 1479457880000000, 1479457880000000, 1482083672000000, 1482083672000000, - 1483206872000000, 1483206872000000] + _EXPECTED_TIMESTAMPS_AFTER_20120430 = [ + 1355853272000000, 1355853272000000, 1364079678000000, 1364079678000000, + 1364079678000000, 1364079678000000, 1384737320000000, 1384737320000000, + 1388512472000000, 1388512472000000, 1391699790000000, 1391699790000000, + 1416273343000000, 1416273343000000, 1416299420000000, 1416299420000000, + 1416299480000000, 1416299480000000, 1485089422000000, 1485089422000000, + 1485089422000000, 1485089422000000, 1485089423000000, 1485089424000000] @shared_test_lib.skipUnlessHasTestFile([u'psort_test.json.plaso']) def testGetEvents(self): @@ -1491,15 +1470,15 @@ def testGetEvents(self): timestamps.append(event.timestamp) expected_timestamps = [] - expected_timestamps.extend(self._EXPECTED_TIMESTAMPS_BEFORE_20060430) - expected_timestamps.extend(self._EXPECTED_TIMESTAMPS_AFTER_20060430) + expected_timestamps.extend(self._EXPECTED_TIMESTAMPS_BEFORE_20120430) + expected_timestamps.extend(self._EXPECTED_TIMESTAMPS_AFTER_20120430) self.assertEqual(len(timestamps), 38) self.assertEqual(sorted(timestamps), expected_timestamps) # Test lower bound time range filter. test_time_range = time_range.TimeRange( - timelib.Timestamp.CopyFromString(u'2016-04-30 06:41:49'), + timelib.Timestamp.CopyFromString(u'2012-04-30 06:41:49'), timelib.Timestamp.CopyFromString(u'2030-12-31 23:59:59')) timestamps = [] @@ -1507,21 +1486,21 @@ def testGetEvents(self): for event in storage_reader.GetEvents(time_range=test_time_range): timestamps.append(event.timestamp) - expected_timestamps = self._EXPECTED_TIMESTAMPS_AFTER_20060430 + expected_timestamps = self._EXPECTED_TIMESTAMPS_AFTER_20120430 self.assertEqual(sorted(timestamps), expected_timestamps) # Test upper bound time range filter. 
test_time_range = time_range.TimeRange( timelib.Timestamp.CopyFromString(u'2000-01-01 00:00:00'), - timelib.Timestamp.CopyFromString(u'2016-04-30 06:41:49')) + timelib.Timestamp.CopyFromString(u'2012-04-30 06:41:49')) timestamps = [] with zip_file.ZIPStorageFileReader(test_file) as storage_reader: for event in storage_reader.GetEvents(time_range=test_time_range): timestamps.append(event.timestamp) - expected_timestamps = self._EXPECTED_TIMESTAMPS_BEFORE_20060430 + expected_timestamps = self._EXPECTED_TIMESTAMPS_BEFORE_20120430 self.assertEqual(sorted(timestamps), expected_timestamps) @@ -1594,7 +1573,6 @@ def testAddEventTag(self): """Tests the AddEventTag function.""" session = sessions.Session() test_events = self._CreateTestEvents() - event_tags = self._CreateTestEventTags() with shared_test_lib.TempDirectory() as temp_directory: temp_file = os.path.join(temp_directory, u'storage.plaso') @@ -1604,7 +1582,8 @@ def testAddEventTag(self): for event in test_events: storage_writer.AddEvent(event) - for event_tag in event_tags: + test_event_tags = self._CreateTestEventTags(test_events) + for event_tag in test_event_tags: storage_writer.AddEventTag(event_tag) storage_writer.Close() diff --git a/tools/pinfo_test.py b/tools/pinfo_test.py index b2233dec57..61c5e98588 100644 --- a/tools/pinfo_test.py +++ b/tools/pinfo_test.py @@ -94,11 +94,11 @@ def testPrintStorageInformationAsText(self): test_tool = pinfo.PinfoTool(output_writer=output_writer) test_filename = u'pinfo_test.json.plaso' - format_version = u'20160715' - plaso_version = u'1.5.1_20161013' - session_identifier = u'3c552fe3-4e64-4871-8a7f-0f4c95dfc1fe' - session_start_time = u'2016-10-16T15:13:58.171984+00:00' - session_completion_time = u'2016-10-16T15:13:58.957462+00:00' + format_version = u'20170121' + plaso_version = u'1.5.2_20170119' + session_identifier = u'98d1caaa-5224-4cf0-90bb-cb766ffb4d6a' + session_start_time = u'2017-01-22T12:50:47.479205+00:00' + session_completion_time = u'2017-01-22T12:50:48.275535+00:00' command_line_arguments = ( u'./tools/log2timeline.py --partition=all --quiet ' @@ -148,7 +148,8 @@ def testPrintStorageInformationAsText(self): u'winreg/windows_timezone', u'winreg/windows_typed_urls', u'winreg/windows_usb_devices', u'winreg/windows_usbstor_devices', u'winreg/windows_version', u'winreg/winlogon', u'winreg/winrar_mru', - u'winreg/winreg_default', u'xchatlog', u'xchatscrollback']) + u'winreg/winreg_default', u'xchatlog', u'xchatscrollback', + u'zsh_extended_history']) table_view = cli_views.ViewsFactory.GetTableView( cli_views.ViewsFactory.FORMAT_TYPE_CLI, @@ -215,9 +216,9 @@ def testPrintStorageInformationAsText(self): def testPrintStorageInformationAsJSON(self): """Tests the _PrintStorageInformationAsJSON function.""" test_filename = u'pinfo_test.json.plaso' - session_identifier = u'3c552fe34e6448718a7f0f4c95dfc1fe' + session_identifier = u'98d1caaa52244cf090bbcb766ffb4d6a' session_start_time = timelib.Timestamp.CopyFromString( - u'2016-10-16 15:13:58.171984+00:00') + u'2017-01-22 12:50:47.479205+00:00') output_writer = cli_test_lib.TestOutputWriter(encoding=u'utf-8') test_tool = pinfo.PinfoTool(output_writer=output_writer) test_file = self._GetTestFilePath([test_filename]) @@ -231,7 +232,11 @@ def testPrintStorageInformationAsJSON(self): test_tool.PrintStorageInformation() output = output_writer.ReadOutput() json_output = json.loads(output) - first_session = json_output[u'session_3c552fe34e6448718a7f0f4c95dfc1fe'] + + first_session_identifier = u'session_{0:s}'.format(session_identifier) + 
first_session = json_output.get(first_session_identifier, None) + self.assertIsNotNone(first_session) + self.assertEqual(first_session[u'identifier'], session_identifier) self.assertEqual(first_session[u'start_time'], session_start_time) diff --git a/tools/psteal_test.py b/tools/psteal_test.py index ce857c21b0..3e3ec0ee1c 100644 --- a/tools/psteal_test.py +++ b/tools/psteal_test.py @@ -295,11 +295,15 @@ def testProcessStorage(self): expected_output_file_name = self._GetTestFilePath( [u'end_to_end', u'dynamic.log']) - with open(expected_output_file_name, 'r') as expected_output_file, open( - result_file_name, 'r') as result_file: - expected_output = expected_output_file.read() - result = result_file.read() - self.assertEqual(expected_output, result) + with open(expected_output_file_name, 'r') as file_object: + expected_output = file_object.read() + + with open(result_file_name, 'r') as file_object: + result_output = file_object.read() + + expected_output = sorted(expected_output.split(b'\n')) + result_output = sorted(result_output.split(b'\n')) + self.assertEqual(expected_output, result_output) output = output_writer.ReadOutput() self.assertIn(u'Events processed : 38', output) diff --git a/utils/pylintrc b/utils/pylintrc index e449f1c9f3..ad949ad4dc 100644 --- a/utils/pylintrc +++ b/utils/pylintrc @@ -63,15 +63,17 @@ load-plugins= # W1201: Specify string format arguments as logging function parameters # # Disabled: +# consider-iterating-dictionary # locally-enabled # logging-format-interpolation +# no-member # redefined-variable-type # simplifiable-if-statement # too-many-boolean-expressions (N/5) # too-many-nested-blocks (N/5) # ungrouped-imports -disable=C0103,C0302,I0010,I0011,R0201,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0141,W0142,W0402,W0404,W0511,W1201,locally-enabled,logging-format-interpolation,redefined-variable-type,simplifiable-if-statement,too-many-boolean-expressions,too-many-nested-blocks,ungrouped-imports +disable=C0103,C0302,I0010,I0011,R0201,R0801,R0901,R0902,R0903,R0904,R0911,R0912,R0913,R0914,R0915,R0921,R0922,W0141,W0142,W0402,W0404,W0511,W1201,consider-iterating-dictionary,locally-enabled,logging-format-interpolation,no-member,redefined-variable-type,simplifiable-if-statement,too-many-boolean-expressions,too-many-nested-blocks,ungrouped-imports [REPORTS]
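For reference, a minimal sketch of the identifier-based tag lookup that the tests above exercise, assuming a plaso checkout with this change applied. The storage file path is a placeholder, and _GetEvent and _GetEventTagByIdentifier are the protected helpers the tests call rather than a public API:

# -*- coding: utf-8 -*-
"""Sketch: resolving event tags to events via serialized stream identifiers."""

from plaso.storage import identifiers
from plaso.storage import zip_file

# Placeholder path to an existing ZIP-based storage file.
storage_path = u'psort_test.json.plaso'

storage_file = zip_file.ZIPStorageFile()
storage_file.Open(path=storage_path)

try:
  # Each event tag carries the identifier of the event it applies to; the
  # event is retrieved by its stream number and entry index.
  tagged_events = []
  for event_tag in storage_file.GetEventTags():
    event_identifier = event_tag.GetEventIdentifier()
    event = storage_file._GetEvent(
        event_identifier.stream_number,
        entry_index=event_identifier.entry_index)
    event.tag = event_tag
    tagged_events.append(event)

  # The reverse lookup: given an event identifier, fetch its tag.
  # None is returned when no tag exists for that event.
  event_identifier = identifiers.SerializedStreamIdentifier(1, 0)
  event_tag = storage_file._GetEventTagByIdentifier(event_identifier)
finally:
  storage_file.Close()

The SerializedStreamIdentifier pairs a stream number with an entry index, which replaces the earlier approach of storing store_number and store_index attributes on the event tag itself, as seen in the removed _GetTaggedEvent test helper.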