
Commit

Fix task required states
aarontp committed Oct 7, 2023
1 parent 3bc5217 commit cd18e38
Showing 10 changed files with 30 additions and 25 deletions.
12 changes: 6 additions & 6 deletions turbinia/evidence.py
@@ -363,7 +363,7 @@ def from_dict(cls, dictionary):

def serialize_attribute(self, name: str) -> str:
"""Returns JSON serialized attribute.
Args:
name(str): the name of the attribute that will be serialized.
Returns:
@@ -381,7 +381,7 @@ def serialize(self, json_values: bool = False):
"""Returns a JSON serialized object. The function will return A string
containing the serialized evidence_dict or a dict of serialized attributes
if json_values is true.
Args:
json_values(bool): Returns only values of the dictionary as json strings
instead of the entire dictionary.
@@ -600,7 +600,7 @@ def format_state(self):

def _validate(self):
"""Runs additional logic to validate evidence requirements.
Evidence subclasses can override this method to perform custom
validation of evidence objects.
"""
@@ -847,13 +847,13 @@ def _preprocess(self, _, required_states):
except TurbiniaException as exception:
log.error(exception)

- if len(path_specs) > 1:
+ if path_spec and len(path_specs) > 1:
path_specs_dicts = [path_spec.CopyToDict() for path_spec in path_specs]
raise TurbiniaException(
'Found more than one path_spec for {0:s} {1:s}: {2!s}'.format(
self.parent_evidence.name, self.partition_location,
path_specs_dicts))
- elif len(path_specs) == 1:
+ elif path_spec and len(path_specs) == 1:
self.path_spec = path_specs[0]
log.debug(
'Found path_spec {0!s} for parent evidence {1:s}'.format(
@@ -1044,7 +1044,7 @@ def __init__(self, plaso_version=None, *args, **kwargs):

def _validate(self):
"""Validates whether the Plaso file contains any events.
Raises:
TurbiniaException: if validation fails.
"""
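The change above makes the path_spec count checks conditional on a path_spec actually having been resolved, so a failed dfVFS scan earlier in _preprocess no longer trips them. A minimal sketch of that guard, with a stubbed exception and illustrative names (everything except path_spec/path_specs and TurbiniaException is a placeholder, not the real Turbinia code):

# Sketch only; the real logic lives inline in _preprocess in evidence.py.
class TurbiniaException(Exception):
  """Stand-in for turbinia.TurbiniaException."""


def select_path_spec(path_spec, path_specs, evidence_name, partition_location):
  """Returns the single resolved path_spec, or None if nothing was resolved."""
  if path_spec and len(path_specs) > 1:
    # More than one candidate is ambiguous, so surface it as an error.
    raise TurbiniaException(
        f'Found more than one path_spec for {evidence_name} '
        f'{partition_location}: {path_specs!s}')
  if path_spec and len(path_specs) == 1:
    # Exactly one match: use it.
    return path_specs[0]
  # path_spec was never set (e.g. the scan raised), so skip the checks.
  return None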
1 change: 1 addition & 0 deletions turbinia/workers/analysis/jenkins.py
@@ -32,6 +32,7 @@
class JenkinsAnalysisTask(TurbiniaTask):
"""Task to analyze a Jenkins install."""

+ # Input is typically extracted artifact so does not need to be MOUNTED
REQUIRED_STATES = [state.ATTACHED, state.CONTAINER_MOUNTED]

TASK_CONFIG = {
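The REQUIRED_STATES edits in this and the following files all follow the same pattern: a TurbiniaTask subclass declares the evidence states it needs, and the worker pre-processes evidence into those states (where the evidence type supports them) before run() is called. A hedged sketch of that pattern, with a hypothetical task name and import paths assumed to match the other analyzers under turbinia/workers:

# Illustrative only; ExampleAnalysisTask does not exist in Turbinia.
from turbinia.evidence import EvidenceState as state
from turbinia.workers import TurbiniaTask


class ExampleAnalysisTask(TurbiniaTask):
  """Hypothetical task showing how REQUIRED_STATES is declared."""

  # Input is typically an extracted artifact, so MOUNTED is not required here.
  REQUIRED_STATES = [state.ATTACHED, state.CONTAINER_MOUNTED]

  def run(self, evidence, result):
    # By the time run() executes, the evidence has been pre-processed into the
    # supported states above, so evidence.local_path is ready to read.
    result.log(f'Analyzing {evidence.local_path}')
    result.close(self, success=True)
    return result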
3 changes: 2 additions & 1 deletion turbinia/workers/analysis/jupyter.py
@@ -30,7 +30,8 @@
class JupyterAnalysisTask(TurbiniaTask):
"""Task to analyze a Jupyter Notebook config."""

- REQUIRED_STATES = [state.ATTACHED, state.CONTAINER_MOUNTED]
+ # Input is typically extracted artifact so does not need to be MOUNTED
+ REQUIRED_STATES = [state.ATTACHED, state.MOUNTED, state.CONTAINER_MOUNTED]

def run(self, evidence, result):
"""Run the Jupyter worker.
2 changes: 1 addition & 1 deletion turbinia/workers/analysis/linux_acct.py
@@ -31,7 +31,7 @@ class LinuxAccountAnalysisTask(TurbiniaTask):
"""Task to analyze a Linux password file."""

REQUIRED_STATES = [
-     state.ATTACHED, state.CONTAINER_MOUNTED, state.DECOMPRESSED
+     state.ATTACHED, state.MOUNTED, state.CONTAINER_MOUNTED, state.DECOMPRESSED
]

TASK_CONFIG = {
30 changes: 15 additions & 15 deletions turbinia/workers/analysis/ssh_analyzer.py
@@ -64,7 +64,7 @@ def __init__(
def calculate_session_id(self) -> None:
"""Calculates pseudo session_id for SSH login.
The pseudo session_id is based on date, hostname, username, source_ip,
and source_port.
"""
# TODO(rmaskey): Find a better way to generate pseudo session_id. Current
@@ -81,7 +81,7 @@ def calculate_session_id(self) -> None:
class LinuxSSHAnalysisTask(TurbiniaTask):
"""Task to analyze Linux SSH authentication."""

- REQUIRED_STATES = [state.MOUNTED, state.CONTAINER_MOUNTED]
+ REQUIRED_STATES = [state.ATTACHED, state.MOUNTED, state.CONTAINER_MOUNTED]

# Log year validation
# The minimum supported log year
@@ -150,10 +150,10 @@ class LinuxSSHAnalysisTask(TurbiniaTask):

def read_logs(self, log_dir: str) -> pd.DataFrame:
"""Reads SSH logs directory and returns Pandas dataframe.
Args:
log_dir (str): Directory containing SSH authentication log.
Returns:
pd.DataFrame: Returns Pandas dataframe.
"""
@@ -226,12 +226,12 @@ def read_logs(self, log_dir: str) -> pd.DataFrame:
def parse_message_datetime(
self, message_datetime: List, log_year: int) -> datetime:
"""Parses and returns datetime.
Args:
message_datetime (List[str]): A list containing syslog datetime separated
by spaces e.g. Feb 8 13:30:45 for Debian, and Red Hat, and
2023-02-08T13:30:45.123456+11:00 for OpenSUSE.
log_year (int): A user provided log year for SSH events. The log year is
not captured by syslog and this is either provided by user or guessed
based on the last SSH event and current date/time.
@@ -261,14 +261,14 @@ def parse_message_datetime(
def read_log_data(self, data, log_filename: str,
log_year: int = None) -> List[SSHEventData]:
"""Parses SSH log data and returns a list of SSHEventData.
Args:
data (str): Content of authentication log file.
log_filename (str): Name of the log file whose content is read.
log_year (int): SSH authentication log year.
Returns:
List(SSHEventData): Returns SSH events as list of SSHEventData.
"""
# check valid year is provided
# If valid year isn't provided raise error
@@ -356,10 +356,10 @@ def read_log_data(self, data, log_filename: str,

def get_priority_value(self, priority_string: str) -> Priority:
"""Returns priority value.
Args:
priority_string (str): Priority values as string e.g. HIGH, MEDIUM, LOW
Returns:
Priority: Returns priority value of priority_string.
"""
@@ -374,15 +374,15 @@ def get_priority_value(self, priority_string: str) -> Priority:

def brute_force_analysis(self, df: pd.DataFrame) -> Tuple[Priority, str, str]:
"""Runs brute force analysis.
Args:
df (pd.DataFrame): Pandas dataframe of SSH events.
Returns:
Tuple[Priority, str, str]: Returns brute force analysis result as tuple.
Priority: Priority of the findings.
str: Brief summary of the findings.
str: Detailed information as markdown.
"""
bfa = BruteForceAnalyzer()

@@ -416,7 +416,7 @@ def run(
Args:
evidence (Evidence object): The evidence being processed by analyzer.
result (TurbiniaTaskResult): The object to place task results into.
Returns:
TurbiniaTaskResult object.
"""
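One note on the parse_message_datetime docstring above: classic Debian/Red Hat syslog timestamps carry no year, which is why the task accepts a log_year argument. A rough sketch of that technique (the helper name is made up and error handling is minimal; this is not the Turbinia implementation):

from datetime import datetime
from typing import List


def parse_syslog_timestamp(parts: List[str], log_year: int) -> datetime:
  """Parses ['Feb', '8', '13:30:45'] or ISO 8601 timestamp fragments."""
  raw = ' '.join(parts)
  try:
    # OpenSUSE-style ISO 8601 timestamps already include the year.
    return datetime.fromisoformat(raw)
  except ValueError:
    # Year-less syslog form: inject the caller-supplied log year.
    return datetime.strptime(f'{log_year} {raw}', '%Y %b %d %H:%M:%S')


print(parse_syslog_timestamp(['Feb', '8', '13:30:45'], 2023))
print(parse_syslog_timestamp(['2023-02-08T13:30:45.123456+11:00'], 2023))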
1 change: 1 addition & 0 deletions turbinia/workers/analysis/wordpress_access.py
@@ -30,6 +30,7 @@
class WordpressAccessLogAnalysisTask(TurbiniaTask):
"""Task to analyze Wordpress access logs."""

+ # Input is typically extracted artifact so does not need to be MOUNTED
REQUIRED_STATES = [state.ATTACHED, state.CONTAINER_MOUNTED]

timestamp_regex = re.compile(r'\[(?P<timestamp>.+)\]')
2 changes: 1 addition & 1 deletion turbinia/workers/analysis/wordpress_creds.py
@@ -35,7 +35,7 @@ class WordpressCredsAnalysisTask(TurbiniaTask):
"""Task to analyze the credentials of a Wordpress instance."""

REQUIRED_STATES = [
-     state.ATTACHED, state.CONTAINER_MOUNTED, state.DECOMPRESSED
+     state.ATTACHED, state.MOUNTED, state.CONTAINER_MOUNTED, state.DECOMPRESSED
]

TASK_CONFIG = {
1 change: 1 addition & 0 deletions turbinia/workers/redis.py
@@ -29,6 +29,7 @@
class RedisAnalysisTask(TurbiniaTask):
"""Task to analyze a Redis configuration file."""

+ # Input is typically extracted artifact so does not need to be MOUNTED
REQUIRED_STATES = [state.ATTACHED, state.CONTAINER_MOUNTED]

def run(self, evidence, result):
2 changes: 1 addition & 1 deletion turbinia/workers/sshd.py
@@ -29,7 +29,7 @@
class SSHDAnalysisTask(TurbiniaTask):
"""Task to analyze a sshd_config file."""

- REQUIRED_STATES = [state.ATTACHED, state.CONTAINER_MOUNTED]
+ REQUIRED_STATES = [state.ATTACHED, state.MOUNTED, state.CONTAINER_MOUNTED]

def run(self, evidence, result):
"""Run the sshd_config analysis worker.
1 change: 1 addition & 0 deletions turbinia/workers/tomcat.py
@@ -29,6 +29,7 @@
class TomcatAnalysisTask(TurbiniaTask):
"""Task to analyze a Tomcat file."""

+ # Input is typically extracted artifact so does not need to be MOUNTED
REQUIRED_STATES = [state.ATTACHED, state.CONTAINER_MOUNTED]

def run(self, evidence, result):
