from collections import OrderedDict

# --- Plugin metadata -------------------------------------------------------
VERSION = "1.1.0"
"""Version of this set of plugin definitions."""
MINIMUM_THAT_VERSION = "1.1.0"
"""Minimum THAT version required to run these plugin definitions."""
# Short name identifying this plugin.
NAME = "patch"
# Plugin run priority (relative ordering among plugins — TODO confirm semantics).
PRIORITY = 3

# --- Data-collection / analysis registries ---------------------------------
# OrderedDicts so questions are asked and analysis snippets executed in the
# exact order they are registered below (the metric snippets depend on the
# "extra_clean_values" / "cleaned_*_df" results registered before them).
ANALYZE_DATA = OrderedDict()
GET_TANIUM_DATA = OrderedDict()
# This plugin fetches no internet data.
GET_INTERNET_DATA = []
# Saved-question definitions. Each entry asks the question
# 'Get <sensor> from all machines' (no filters) and stores the results in
# the csv file named by the key. All four follow the same shape, so they
# are registered from a single table of (csv filename, sensor name) pairs.
for _csv_name, _sensor in (
    ("haspatchtools.csv", "Has Patch Tools"),
    ("patches.csv", "Available Patches"),
    ("patchstatus.csv", "Available Patch Status"),
    ("patchreboot.csv", "Reboot Required"),
):
    GET_TANIUM_DATA[_csv_name] = {
        "filters": [],
        "sensors": [_sensor],
    }
# Snippet executed first: defines extra "noise" values stripped from every
# dataframe before analysis (sensor error/placeholder rows).
ANALYZE_DATA["extra_clean_values"] = """
# set extra clean values to be used for df cleaning
result = ["Scan Error", "N/A on Mac", "N/A on Solaris", "N/A on Linux", "N/A on AIX"]
"""

# All four csv files are cleaned identically, so each cleaning snippet is
# rendered from one template; the generated source is byte-identical to
# writing the four snippets out longhand.
_CLEAN_TEMPLATE = """
# clean out noise from {csv}
csv = "{csv}"
df = self.load_csv_as_df(csv)
extra_clean_values = self.get_result("extra_clean_values")
result = self.clean_df(df, columns=["{column}"], add_values=extra_clean_values)
"""

for _result_name, _csv, _column in (
    ("cleaned_haspatchtools_df", "haspatchtools.csv", "Has Patch Tools"),
    ("cleaned_patches_df", "patches.csv", "Title"),
    ("cleaned_patchstatus_df", "patchstatus.csv", "Available Patch Status"),
    ("cleaned_patchreboot_df", "patchreboot.csv", "Reboot Required"),
):
    ANALYZE_DATA[_result_name] = _CLEAN_TEMPLATE.format(csv=_csv, column=_column)
# ---------------------------------------------------------------------------
# Metric snippets. Each value is python source executed by THAT with `self`
# bound to the analyzer; the snippet's `result` variable becomes the metric
# stored under the key. They consume the "cleaned_*_df" results registered
# above.
# ---------------------------------------------------------------------------
# Endpoints with patch tooling installed (the patch-eligible target count).
ANALYZE_DATA["total_targets"] = """
# has patch tools (targets)
df = self.get_result("cleaned_haspatchtools_df")
result = df.loc[df['Has Patch Tools'] == 'Yes', 'Count'].sum()
"""
ANALYZE_DATA["any_patch_count"] = """
# Total Number of Missing Patches (Any Severity)
df = self.get_result("cleaned_patches_df")
result = df['Title'].count()
"""
ANALYZE_DATA["critical_patch_count"] = """
# Total Number of Missing Patches (Critical Severity)
df = self.get_result("cleaned_patches_df")
result = df.loc[df['Severity'] == 'Critical', 'Title'].count()
"""
ANALYZE_DATA["any_req_patch"] = """
# Total Required Patches Across Endpoints (any Severity)
df = self.get_result("cleaned_patches_df")
result = df['Count'].sum()
"""
ANALYZE_DATA["critical_req_patch"] = """
# Total Required Patches Across Endpoints (Critical Severity)
df = self.get_result("cleaned_patches_df")
result = df.loc[df['Severity'] == 'Critical', 'Count'].sum()
"""
ANALYZE_DATA["any_req_patch_avg"] = """
# Required Patches Across Endpoints Average (any Severity)
df = self.get_result("cleaned_patches_df")
result = df['Count'].mean()
"""
# NOTE(review): min() of the raw 'Date' column — lexicographic unless the
# column is parsed to datetimes upstream; TODO confirm the csv date format
# sorts correctly as text.
ANALYZE_DATA["oldest_patch"] = """
# Oldest Date Any Severity Patch
df = self.get_result("cleaned_patches_df")
result = df['Date'].min()
"""
# Strips the '-kb<number>...' suffix from patch filenames so each product
# family counts once. regex=True is passed explicitly because pandas >= 2.0
# changed the Series.str.replace default to regex=False, which would
# silently stop the pattern from matching (regex kwarg needs pandas >= 0.23).
ANALYZE_DATA["msft_prod_affected"] = """
# Approximate Number of Products Affected (Unique Product Count)
df = self.get_result("cleaned_patches_df")
result = len(df['Filename'].str.replace('\\\\-kb.*', '', regex=True).unique())
"""
ANALYZE_DATA["five_critical"] = """
# More than 5 Critical Patches Required
df = self.get_result("cleaned_patchstatus_df")
result = df.loc[df['Available Patch Status'] == 'More than 5 Critical Patches Required', 'Count'].sum()
"""
ANALYZE_DATA["reboot_req"] = """
# Reboot Required
df = self.get_result("cleaned_patchreboot_df")
result = df.loc[df['Reboot Required'] == 'Yes', 'Count'].sum()
"""