
Commit

Merge branch 'main' into add-link-flapping-to-log-analyzer
boazhaim committed Sep 9, 2024
2 parents 20215ce + 59d13a0 commit edbfe6d
Showing 7 changed files with 8 additions and 29 deletions.
3 changes: 1 addition & 2 deletions plugins/ufm_log_analyzer_plugin/README.md
@@ -1,6 +1,6 @@
# UFM LOGANALYZER

- **Warning:** This feature is still under development and right now should only be used internally
+ **Warning:** This feature is still under development and should only be used internally

## What
This tool should help developers find issues in a UFM sysdump or logs.
@@ -59,7 +59,6 @@ options:
                        Depth of logs tar extraction, default is 1
  --hours HOURS         How many hours to process from last logs. Default is 6 hours
  -i, --interactive     Should an interactive Ipython session start. Default is False
-   -s, --show-output     Should the output charts be presented. Default is False
  --skip-tar-extract    If the location is to an existing extracted tar or just UFM logs directory, skip the tar extraction and only copy the needed logs. Default is False
  --interval [{1min,10min,1h,24h}]
                        Time interval for the graphs. Choices are: '1min'- Every minute, '10min'- Every ten minutes, '1h'- Every one hour, '24h'- Every 24 hours. Default is '1H'.
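For readers unfamiliar with the `--interval` option, its choices map naturally onto pandas time bucketing. A minimal sketch of that idea, assuming pandas; the mapping dict, function, and sample timestamps below are illustrative only and are not taken from the plugin's code:

```python
# Minimal sketch: mapping an --interval choice onto pandas resampling.
# INTERVAL_TO_FREQ and the sample data are illustrative, not the plugin's code.
import pandas as pd

INTERVAL_TO_FREQ = {"1min": "1min", "10min": "10min", "1h": "1h", "24h": "24h"}

def bucket_counts(timestamps, interval="1h"):
    """Count log events per time bucket for the chosen interval."""
    series = pd.Series(1, index=pd.to_datetime(timestamps))
    return series.resample(INTERVAL_TO_FREQ[interval]).sum()

print(bucket_counts(["2024-09-09 10:01", "2024-09-09 10:45", "2024-09-09 12:30"]))
```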
19 changes: 0 additions & 19 deletions plugins/ufm_log_analyzer_plugin/src/loganalyze/log_analyzer.py
@@ -27,9 +27,6 @@
from pathlib import Path
import traceback
from typing import Callable, List, Set, Tuple
- import subprocess
- import platform
- import matplotlib.pyplot as plt


from loganalyze.log_analyzers.base_analyzer import BaseImageCreator
@@ -197,12 +194,6 @@ def parse_args():
        action="store_true",
        help="Should an interactive Ipython session start. Default is False",
    )
-     parser.add_argument(
-         "-s",
-         "--show-output",
-         action="store_true",
-         help="Should the output charts be presented. Default is False",
-     )
    parser.add_argument(
        "--skip-tar-extract",
        action="store_true",
@@ -375,16 +366,6 @@ def create_analyzer(parsed_args, full_extracted_logs_list,
    files_types_to_delete.add("csv") #tmp csv + telemetery samples
    files_types_to_delete.add("gz") #gz files of logs and samples
    delete_files_by_types(args.destination, files_types_to_delete)
-     if args.show_output:
-         if platform.system() == "Windows":
-             os.startfile(pdf_path) # pylint: disable=no-member
-         elif platform.system() == "Darwin": # macOS
-             subprocess.call(("open", pdf_path)) # pylint: disable=no-member
-         else: # Linux
-             subprocess.call(("xdg-open", pdf_path)) # pylint: disable=no-member
- 
-         # This will show all the graphs we created
-         plt.show()
    if args.interactive:
        import IPython

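The deleted block above is the stock cross-platform "open a file with the default viewer" recipe, which this commit drops along with the `--show-output` flag. For reference, the same pattern as a standalone sketch; `"report.pdf"` is a placeholder, not the plugin's actual output path:

```python
# The cross-platform "open with default viewer" pattern removed above.
# "report.pdf" is a placeholder path for illustration.
import os
import platform
import subprocess

def open_with_default_viewer(path):
    if platform.system() == "Windows":
        os.startfile(path)  # delegate to the Windows shell file association
    elif platform.system() == "Darwin":
        subprocess.call(("open", path))  # macOS
    else:
        subprocess.call(("xdg-open", path))  # most Linux desktops

open_with_default_viewer("report.pdf")
```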
@@ -54,7 +54,7 @@ def print_exceptions(self):
    def print_exceptions_per_time_count(self):
        error_data = self._log_data_sorted[self._log_data_sorted["type"] == "Error"]
        errors_per_hour = error_data.groupby(DataConstants.AGGREGATIONTIME).size()
-         self._save_data_based_on_timestamp(
+         images_created = self._save_data_based_on_timestamp(
            errors_per_hour,
            "Time",
            "Amount of exceptions",
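The context lines show the counting step: filter to error rows, then `groupby(...).size()` per time bucket. A self-contained pandas sketch of that aggregation, with an illustrative `aggregated_by` column standing in for `DataConstants.AGGREGATIONTIME`:

```python
# Sketch of the groupby(...).size() aggregation used above.
# The sample data and the "aggregated_by" column are illustrative.
import pandas as pd

log_data = pd.DataFrame({
    "type": ["Error", "Error", "Warning", "Error"],
    "aggregated_by": ["10:00", "10:00", "11:00", "11:00"],
})

error_data = log_data[log_data["type"] == "Error"]
errors_per_bucket = error_data.groupby("aggregated_by").size()
print(errors_per_bucket)  # 10:00 -> 2, 11:00 -> 1
```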
@@ -105,7 +105,7 @@ def print_link_up_down_count_per_hour(self):
        pivot_links_data = counted_links_events_by_time.pivot(
            index=DataConstants.AGGREGATIONTIME, columns="event", values="count"
        ).fillna(0)
-         self._save_pivot_data_in_bars(
+         graph_images = self._save_pivot_data_in_bars(
            pivot_links_data,
            "Time",
            "Number of Events",
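The `pivot` call above reshapes long-format event counts into one column per event type, so link-up and link-down counts can be drawn as separate bar series; `fillna(0)` covers buckets where an event type never occurred. A small sketch of that reshape with illustrative data:

```python
# Sketch of the long-to-wide pivot used above: one column per event type,
# missing (time, event) pairs filled with 0. Sample data is illustrative.
import pandas as pd

counted = pd.DataFrame({
    "time":  ["10:00", "10:00", "11:00"],
    "event": ["Link went up", "Link went down", "Link went up"],
    "count": [3, 1, 2],
})

pivot = counted.pivot(index="time", columns="event", values="count").fillna(0)
print(pivot)
# event   Link went down  Link went up
# 10:00              1.0           3.0
# 11:00              0.0           2.0
```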
@@ -60,7 +60,7 @@ def analyze_endpoints_freq(self, endpoints_count_to_show=10):
                                             values='amount_per_uri').fillna(0)
        data_to_show = data_to_show[top_x_uris]

-         self._save_pivot_data_in_bars(data_to_show,
+         return self._save_pivot_data_in_bars(data_to_show,
                                              "time",
                                              "requests count",
                                              f"Top {endpoints_count_to_show} "\
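Here the analyzer pivots request counts per URI and keeps only the busiest endpoints. A sketch of one way to select the top-N columns after such a pivot; the data and the `nlargest` step are illustrative, not the analyzer's exact code:

```python
# Sketch of keeping only the top-N busiest URIs after pivoting.
# The sample data and helper logic are illustrative.
import pandas as pd

requests = pd.DataFrame({
    "time":           ["10:00", "10:00", "11:00", "11:00"],
    "uri":            ["/a", "/b", "/a", "/c"],
    "amount_per_uri": [10, 2, 7, 1],
})

wide = requests.pivot(index="time", columns="uri", values="amount_per_uri").fillna(0)
top_uris = wide.sum().nlargest(2).index  # the 2 busiest URIs overall
data_to_show = wide[top_uris]
print(data_to_show)
```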
@@ -38,7 +38,7 @@ def print_failed_tests_per_hour(self):
        pivot_failed_by_time = grouped_failed_only_relevant_by_time.pivot(
            index=DataConstants.AGGREGATIONTIME, columns="test_name", values="count"
        ).fillna(0)
-         self._save_pivot_data_in_bars(
+         graph_images = self._save_pivot_data_in_bars(
            pivot_failed_by_time,
            "Time",
            "Number of failures",
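This hunk repeats the commit's recurring change: a bare call to a `_save_*` helper becomes an assignment (or, in the endpoints file, a `return`). The chosen names (`images_created`, `graph_images`, `create_images`) suggest the helpers report which image files they produced so callers can collect them, for example to assemble a report. A hedged sketch of that pattern; the helper below is hypothetical, not the plugin's API:

```python
# Hypothetical stand-in for the _save_* helpers: save a chart and
# return the paths of the images written, so callers can collect them.
from typing import List

def save_pivot_data_in_bars(data, x_label, y_label, title) -> List[str]:
    """Pretend to render `data` as bars and report the saved file's path."""
    image_path = f"{title.replace(' ', '_')}.png"
    # ... real plotting code would save the chart to image_path here ...
    return [image_path]

collected: List[str] = []
collected += save_pivot_data_in_bars(None, "Time", "Number of failures", "Failed tests per hour")
print(collected)  # ['Failed_tests_per_hour.png']
```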
@@ -99,7 +99,7 @@ def full_analyze_ufm_loading_time(self):
        if not matched_logs_df.empty:
            matched_logs_df.drop(columns=["timestamp_end"], inplace=True)
            matched_logs_df.set_index("timestamp_start", inplace=True)
-             self._save_data_based_on_timestamp(
+             create_images = self._save_data_based_on_timestamp(
                matched_logs_df,
                DataConstants.TIMESTAMP,
                "Loading time Time (seconds)",
@@ -214,7 +214,7 @@ def full_analyze_fabric_analysis_time(self):
        )
        merged_logs.set_index("timestamp_start", inplace=True)
        title = "Fabric analysis run time"
-         self._save_data_based_on_timestamp(
+         create_images = self._save_data_based_on_timestamp(
            merged_logs, "Time", "Processing Time (s)", title
        )

@@ -269,8 +269,7 @@ def full_telemetry_processing_time_report(self):

        # Plot the data within the filtered time range
        title = "Telemetry processing time"
- 
-         self._save_data_based_on_timestamp(
+         create_images = self._save_data_based_on_timestamp(
            minutely_mean_processing_time,
            "Time",
            "Processing Time (s)",
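The loading-time hunk above operates on rows where a start log line was matched with its corresponding end line: the end timestamp is dropped and the frame is indexed by start time before plotting. A sketch of that pairing idea using a pandas merge; the run ids, column names, and times are illustrative:

```python
# Sketch of the start/end pairing behind full_analyze_ufm_loading_time:
# match start and end rows, compute the duration, index by start time.
# Sample data and column names are illustrative.
import pandas as pd

starts = pd.DataFrame({"run": [1, 2], "timestamp_start": pd.to_datetime(["10:00", "11:00"])})
ends   = pd.DataFrame({"run": [1, 2], "timestamp_end":   pd.to_datetime(["10:05", "11:12"])})

matched = starts.merge(ends, on="run")
matched["seconds"] = (matched["timestamp_end"] - matched["timestamp_start"]).dt.total_seconds()
matched.drop(columns=["timestamp_end"], inplace=True)
matched.set_index("timestamp_start", inplace=True)
print(matched)
```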
