Compare commits

..

No commits in common. 'main' and 'AizenSousuke' have entirely different histories.

@ -1,16 +0,0 @@
{
"workbench.colorCustomizations": {
"sideBar.background": "#1f2329",
"activityBar.background": "#2A2E37",
"titleBar.activeBackground": "#2A2E37",
"editor.background": "#181B20",
"terminal.background": "#181B20",
"panel.background": "#181B20",
"dropdown.background": "#181B20",
"dropdown.listBackground": "#181B20",
"editorGroupHeader.tabsBackground": "#1F2229",
"tab.inactiveBackground": "#23282E",
"menu.background": "#2A2F37",
"menu.selectionBackground": "#0E629D"
}
}

@ -103,48 +103,15 @@ def evtxdetect_auto():
try:
#print(Security_path)
# 创建一个多进程对象用于并行分析用户配置文件UserProfile相关的日志
# target参数指定了新进程要执行的函数args参数传递了执行该函数所需的参数
userprofile = multiprocessing.Process(
target=EvtxDetection.multiprocess, # 要在新进程中执行的函数
args=( # 传递给函数的参数列表
UserProfile_path_list, # 用户配置文件路径的列表
EvtxDetection.detect_events_UserProfileService_log, # 检测事件的函数
input_timezone, # 输入时区信息
timestart, # 分析开始时间
timeend, # 分析结束时间
objectaccess, # 是否分析对象访问事件
processexec, # 是否分析进程执行事件
logons, # 是否分析登录事件
frequencyanalysis, # 是否进行频率分析
allreport, # 是否生成全部报告
Output, # 输出路径或相关配置
CPU_Core # 使用的CPU核心数
)
)#创建多线程分析windows日志其中参数包括用户配置文件的路径列表、一个检测事件的函数、时区信息、时间范围、以及一系列用于分析的选项如对象访问、进程执行、登录、频率分析等
# 启动新进程开始执行multiprocess函数
userprofile=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (UserProfile_path_list,EvtxDetection.detect_events_UserProfileService_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core))
userprofile.start()
# 将新创建的进程对象添加到process_list列表中以便后续管理或跟踪
process_list.append(userprofile)
# 如果在try块中发生了IOError异常如文件不存在则执行以下代码
except IOError :
# 打印错误信息,指出分析用户配置文件日志时出错,文件路径不存在
print("Error Analyzing User Profile logs: ", end='')
print("File Path Does Not Exist")
# 如果在try块中发生了除IOError之外的其他异常则执行以下代码
except Exception as e:
# 打印错误信息,指出分析用户配置文件日志时出错
print("Error Analyzing User Profile logs")
# 使用logging模块记录异常的详细信息包括堆栈跟踪
# traceback.format_exc()会返回一个包含异常堆栈跟踪信息的字符串
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析安全日志
try:
#print(Security_path)
sec=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (Security_path_list,EvtxDetection.detect_events_security_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -156,8 +123,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing Security logs")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析系统日志
try:
#EvtxDetection.multiprocess(system_path_list,EvtxDetection.detect_events_system_log,input_timezone,timestart,timeend)
sys=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (system_path_list,EvtxDetection.detect_events_system_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -169,8 +134,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing System logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析PowerShell操作日志
try :
#EvtxDetection.multiprocess(powershellop_path_list,EvtxDetection.detect_events_powershell_operational_log,input_timezone,timestart,timeend)
pwshop=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (powershellop_path_list,EvtxDetection.detect_events_powershell_operational_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -182,8 +145,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing Powershell Operational logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析PowerShell日志
try :
#EvtxDetection.multiprocess(powershell_path_list,EvtxDetection.detect_events_powershell_log,input_timezone,timestart,timeend)
pwsh=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (powershell_path_list,EvtxDetection.detect_events_powershell_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -195,8 +156,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing Powershell logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析Terminal Services LocalSessionManager日志
try :
#EvtxDetection.multiprocess(terminal_path_list,EvtxDetection.detect_events_TerminalServices_LocalSessionManager_log,input_timezone,timestart,timeend)
terminal=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (terminal_path_list,EvtxDetection.detect_events_TerminalServices_LocalSessionManager_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -208,8 +167,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing TerminalServices LocalSessionManager logs")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析Terminal Services RDP Client远程桌面协议客户端的日志
try :
#EvtxDetection.multiprocess(terminal_path_list,EvtxDetection.detect_events_TerminalServices_LocalSessionManager_log,input_timezone,timestart,timeend)
terminal_client=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (terminal_Client_path_list,EvtxDetection.detect_events_TerminalServices_RDPClient_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -222,7 +179,6 @@ def evtxdetect_auto():
print("Error Analyzing TerminalServices RDP Client logs")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析计划任务Scheduled Task的日志
try:
#EvtxDetection.multiprocess(scheduledtask_path_list,EvtxDetection.detect_events_scheduled_task_log,input_timezone,timestart,timeend)
scheduled=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (scheduledtask_path_list,EvtxDetection.detect_events_scheduled_task_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -235,26 +191,24 @@ def evtxdetect_auto():
print("Error Analyzing Scheduled Task logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析Windows Defender的日志
try:
#EvtxDetection.multiprocess(defender_path_list,EvtxDetection.detect_events_windows_defender_log,input_timezone,timestart,timeend)
defen=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (defender_path_list,EvtxDetection.detect_events_windows_defender_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
defen.start()
process_list.append(defen)
except IOError :
print("Error Analyzing Windows Defender logs : ", end='')
print("File Path Does Not Exist")
except Exception as e:
print("Error Analyzing Windows Defender logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析Windows远程管理WinRM的日志
try:
#EvtxDetection.multiprocess(winrm_path_list,EvtxDetection.detect_events_Microsoft_Windows_WinRM,input_timezone,timestart,timeend)
winrm=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (winrm_path_list,EvtxDetection.detect_events_Microsoft_Windows_WinRM,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
winrm.start()
process_list.append(winrm)
except IOError :
print("Error Analyzing WinRM logs : ", end='')
print("File Path Does Not Exist")
@ -262,12 +216,12 @@ def evtxdetect_auto():
print("Error Analyzing WinRM logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析Sysmon系统监控器的日志
try:
#EvtxDetection.multiprocess(sysmon_path_list,EvtxDetection.detect_events_Sysmon_log,input_timezone,timestart,timeend)
sysmon=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (sysmon_path_list,EvtxDetection.detect_events_Sysmon_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
sysmon.start()
process_list.append(sysmon)
except IOError :
print("Error Analyzing Sysmon logs ")
print("File Path Does Not Exist")
@ -275,12 +229,12 @@ def evtxdetect_auto():
print("Error Analyzing Sysmon logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析组策略Group Policy日志
try:
#EvtxDetection.multiprocess(group_policy_path_list,EvtxDetection.detect_events_group_policy_log,input_timezone,timestart,timeend)
gp=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (group_policy_path_list,EvtxDetection.detect_events_group_policy_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
gp.start()
process_list.append(gp)
except IOError :
print("Error Analyzing Group Policy logs ")
print("File Path Does Not Exist")
@ -288,12 +242,12 @@ def evtxdetect_auto():
# print("Error Analyzing Group Policy logs ")
# logging.error(traceback.format_exc())
#类似上一个try块用于并行分析SMBServer Message Block服务器日志
try:
#EvtxDetection.multiprocess(SMB_SERVER_path_list,EvtxDetection.detect_events_SMB_Server_log,input_timezone,timestart,timeend)
smbserv=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (SMB_SERVER_path_list,EvtxDetection.detect_events_SMB_Server_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
smbserv.start()
process_list.append(smbserv)
except IOError :
print("Error Analyzing SMB Server logs ")
print("File Path Does Not Exist")
@ -301,20 +255,19 @@ def evtxdetect_auto():
print("Error Analyzing Group Policy logs ")
logging.error(traceback.format_exc())
#类似上一个try块用于并行分析SMB客户端日志
try:
#EvtxDetection.multiprocess(SMB_CLIENT_path_list,EvtxDetection.detect_events_SMB_Client_log,input_timezone,timestart,timeend)
smbcli=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (SMB_CLIENT_path_list,EvtxDetection.detect_events_SMB_Client_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
smbcli.start()
process_list.append(smbcli)
except IOError :
print("Error Analyzing SMB Client logs ")
print("File Path Does Not Exist")
except Exception as e:
print("Error Analyzing Group Policy logs ")
logging.error(traceback.format_exc())
#使用了process.join()来等待所有子进程完成
for process in process_list:
process.join()
print("preparing results")
@ -352,72 +305,43 @@ def evtxdetect_auto():
Frequency_Analysis_Sysmon=EvtxDetection.Frequency_Analysis_Sysmon
Frequency_Analysis_SMB_Server=EvtxDetection.Frequency_Analysis_SMB_Server
Frequency_Analysis_TerminalServices=EvtxDetection.Frequency_Analysis_TerminalServices
# 检查临时目录下是否存在名为"_User_SIDs_report.csv"的文件
if os.path.exists(temp_dir + "_User_SIDs_report.csv"):
# 如果文件存在则读取该文件到pandas DataFrame中但随后将DataFrame转换为字典格式其中键是列名值是对应列的数据列表
# 注意原注释掉的代码是直接将CSV文件读取为DataFrame而当前代码是读取后转换为字典
#User_SIDs = pd.DataFrame(pd.read_csv(temp_dir + "_User_SIDs_report.csv"))
User_SIDs = pd.DataFrame(pd.read_csv(temp_dir + "_User_SIDs_report.csv")).to_dict(orient='list')
else:
# 如果文件不存在,则打印一条消息说明文件不存在
# 注意这里的f-string用于格式化字符串将变量temp_dir的值插入到字符串中
print(f"{temp_dir + '_User_SIDs_report.csv'} does not exist.")
#User_SIDs = pd.DataFrame(User_SIDs)
#User_SIDs=EvtxDetection.User_SIDs
resolveSID()
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''———————————————————————————————————————————————————————————————————————————————————我是分割线————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
def auto_detect(path):
global input_timezone
# 编译正则表达式用于匹配EventID, Channel, 和 Computer 标签的内容
EventID_rex = re.compile('<EventID.*>(.*)<\/EventID>', re.IGNORECASE)
Channel_rex = re.compile('<Channel.*>(.*)<\/Channel>', re.IGNORECASE)
Computer_rex = re.compile('<Computer.*>(.*)<\/Computer>', re.IGNORECASE)
# 检查提供的路径是目录还是文件
if os.path.isdir(path):
# 如果是目录使用libPath函数递归查找所有EVTX文件
files=list(libPath(path).rglob("*.[eE][vV][tT][xX]"))
#files=glob.glob(path+"/**/"+"*.evtx")
elif os.path.isfile(path):
# 如果是文件,直接匹配该文件的路径
files=glob.glob(path)
else:
# 如果路径既不是目录也不是文件,则打印错误消息并返回
print("Issue with the path" )
return
#print("hunting ( %s ) in files ( %s )"%(str_regex,files))
#user_string = input('please enter a string to convert to regex: ')
# 遍历找到的文件
for file in files:
file=str(file)
print("Analyzing "+file)
try:
# 尝试使用PyEvtxParser解析文件
parser = PyEvtxParser(file)
except:
# 如果解析文件时出错(例如文件损坏),则打印错误消息并继续下一个文件
print("Issue analyzing "+file +"\nplease check if its not corrupted")
continue
try:
# 遍历解析器中的事件记录
for record in parser.records():
# 使用正则表达式查找记录数据中的通道信息
Channel = Channel_rex.findall(record['data'])
# 检查Channel列表是否非空并获取第一个元素通道名然后去除前后的空白字符
# 根据通道名将文件路径添加到相应的列表中
# 注意由于使用了break语句每个文件只会被添加到第一个匹配的列表中
if Channel[0].strip()=="Security":
Security_path_list.append(file)
break
@ -465,20 +389,8 @@ def auto_detect(path):
break
except:
# 捕获异常并打印错误消息
print("issue assigning path")
evtxdetect_auto()
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''———————————————————————————————————————————————————————————————————————————————————我是分割线————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
def threat_hunt(path,str_regex,eid,hunt_file):
global timestart,timeend,input_timezone, Output
import os
@ -487,17 +399,15 @@ def threat_hunt(path,str_regex,eid,hunt_file):
if 1==1:
if hunt_file is not None:
if os.path.isfile(hunt_file):
print(regex_file) # 这行打印一个空列表,可能是调试用的,可以移除
print(regex_file)
regex_file=open(hunt_file).read().split("\n")
regex_file.remove('')# 读取文件并去除空行和首尾空白
regex_file.remove('')
print(regex_file)
else:
print("Issue with the hunt file path" )
return
# 使用os.path.isdir和os.path.isfile检查路径
if os.path.isdir(path):
files=list(libPath(path).rglob("*.[eE][vV][tT][xX]"))# 确保libPath返回的是一个可以调用rglob的对象
files=list(libPath(path).rglob("*.[eE][vV][tT][xX]"))
elif os.path.isfile(path):
files=glob.glob(path)
@ -505,29 +415,16 @@ def threat_hunt(path,str_regex,eid,hunt_file):
print("Issue with the path" )
return
# 确定要使用的正则表达式列表
#user_string = input('please enter a string to convert to regex: ')
if str_regex is not None:
regex=[str_regex]
elif str_regex is None and len(regex_file)>0:# 如果str_regex为None且regex_file为空可能需要处理这种情况
elif str_regex is None and len(regex_file)>0:
regex=regex_file
print("hunting ( %s ) in files ( %s )"%(regex,files))
EvtxHunt.Evtx_hunt(files,regex,eid,input_timezone,Output,timestart,timeend)
#except Exception as e:
# print("Error in hunting module ")
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''———————————————————————————————————————————————————————————————————————————————————我是分割线————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
def report():
# 定义输出文件的名称
global Output,User_SIDs
timesketch=Output+"_TimeSketch.csv"
Report=Output+"_Report.xlsx"
@ -536,32 +433,26 @@ def report():
ProcessEvents=Output+"_Process_Execution_Events.csv"
Collected_SIDs=Output+"_Collected_SIDs.csv"
print("preparing report")
# 读取用户SID报告
if os.path.exists(temp_dir + "_User_SIDs_report.csv"):
User_SIDs = pd.DataFrame(pd.read_csv(temp_dir + "_User_SIDs_report.csv"))
else:
print(f"{temp_dir + '_User_SIDs_report.csv'} does not exist.")
User_SIDs = pd.DataFrame(User_SIDs)
# 读取Sysmon报告
if os.path.exists(temp_dir + "_Sysmon_report.csv"):
Sysmon = pd.DataFrame(pd.read_csv(temp_dir + "_Sysmon_report.csv"))
else:
print(f"{temp_dir + '_Sysmon_report.csv'} does not exist.")
Sysmon = pd.DataFrame(Sysmon_events[0])
# 读取系统报告
if os.path.exists(temp_dir + "_System_report.csv"):
System = pd.DataFrame(pd.read_csv(temp_dir + "_System_report.csv"))
else:
print(f"{temp_dir + '_System_report.csv'} does not exist.")
System = pd.DataFrame(System_events[0])
# 读取Powershell报告
if os.path.exists(temp_dir + "_Powershell_report.csv"):
Powershell = pd.DataFrame(pd.read_csv(temp_dir + "_Powershell_report.csv"))
else:
print(f"{temp_dir + '_Powershell_report.csv'} does not exist.")
Powershell = pd.DataFrame(Powershell_events[0])
# 以下连续的if-else代码块均实现类似功能读取某指定报告
if os.path.exists(temp_dir + "_Powershell_Operational_report.csv"):
Powershell_Operational = pd.DataFrame(pd.read_csv(temp_dir + "_Powershell_Operational_report.csv"))
else:
@ -697,47 +588,33 @@ def report():
#Object_Access_Events_pd=pd.DataFrame(Object_Access_Events[0])
#ExecutedProcess_Events_pd=pd.DataFrame(Executed_Process_Events[0])
# allresults=pd.DataFrame([TerminalServices,Powershell_Operational],columns=['Date and Time', 'Detection Rule','Detection Domain','Severity','Event Description','Event ID','Original Event Log'])
# 将多个DataFrameScheduledTask, Powershell_Operational, Sysmon, 等合并为一个DataFrame使用内连接inner join并忽略原索引
allresults = pd.concat(
[ScheduledTask, Powershell_Operational, Sysmon, System, Powershell, Security,TerminalClient, TerminalServices, WinRM,
Windows_Defender,GroupPolicy,SMBServer,SMBClient], join="inner", ignore_index=True)
# 重命名DataFrame中的两列将'Date and Time'改为'datetime''Detection Rule'改为'message'
allresults = allresults.rename(columns={'Date and Time': 'datetime', 'Detection Rule': 'message'})
# 在DataFrame中新增一个名为'timestamp_desc'的列,其初始值设为空字符串
allresults['timestamp_desc'] = ""
# 重新排序DataFrame的列按照指定的顺序排列
allresults = allresults[
['message','timestamp', 'datetime', 'timestamp_desc', 'Detection Domain', 'Severity', 'Event Description', 'Event ID',
'Original Event Log','Computer Name','Channel']]
# 计算'Severity'列中每个不同值的出现次数并将结果存储在新的DataFrame中重命名列为'Severity'和'Counts'
Result_Summary_Severity=allresults["Severity"].value_counts().reset_index()
Result_Summary_Severity.columns = ['Severity', 'Counts']
# 计算'message'列中每个不同值的出现次数并将结果存储在新的DataFrame中重命名列为'Detection'和'Counts'
Result_Summary_Detections=allresults["message"].value_counts().reset_index()
Result_Summary_Detections.columns = ['Detection', 'Counts']
# 将allresults DataFrame保存到CSV文件中文件名由timesketch变量指定不保存索引
allresults.to_csv(timesketch, index=False)
# 将User_SIDs DataFrame保存到CSV文件中文件名由Collected_SIDs变量指定不保存索引
User_SIDs.to_csv(Collected_SIDs, index=False)
# 打印信息表明Time Sketch报告已保存文件名由timesketch变量指定
print("Time Sketch Report saved as "+timesketch)
#Logon_Events_pd.to_csv(LogonEvents, index=False)
# 如果logons为True或者allreport为True则打印信息表明Logon事件报告已保存文件名由LogonEvents变量指定
if (logons==True or allreport==True):
print("Logon Events Report saved as "+LogonEvents)
#Object_Access_Events_pd.to_csv(ObjectAccess, index=False)
# 如果objectaccess为True或者allreport为True则打印信息表明对象访问事件报告已保存文件名由ObjectAccess变量指定
if (objectaccess==True or allreport==True):
print("Object Access Events Report saved as "+ObjectAccess)
#ExecutedProcess_Events_pd.to_csv(ProcessEvents, index=False)
# 如果processexec为True或者allreport为True则打印信息表明进程执行事件报告已保存文件名由ProcessEvents变量指定
if (processexec==True or allreport==True):
print("Process Execution Events Report saved as "+ProcessEvents)
# Sysmon=Sysmon.reset_index()
# Sysmon=Sysmon.drop(['index'],axis=1)
#写表格
writer = pd.ExcelWriter(Report, engine='xlsxwriter', engine_kwargs={'options':{'encoding': 'utf-8'}})
Result_Summary_Severity.to_excel(writer, sheet_name='Result Summary', index=False)
Result_Summary_Detections.to_excel(writer, sheet_name='Result Summary' , startrow=len(Result_Summary_Severity)+3, index=False)
@ -800,65 +677,37 @@ def report():
print("Detection Summary :\n############################################\nNumber of incidents by Severity:\n"+allresults["Severity"].value_counts().to_string()+"\n############################################\nNumber of incidents by Detection Rule:\n"+allresults["message"].value_counts().to_string()+"\n\n")
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''———————————————————————————————————————————————————————————————————————————————————我是分割线————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
'''————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————————'''
def convert_list():
    """Normalize every collected event/summary container in place.

    Each container in ``results`` is a sequence whose first element is a dict
    mapping column names to iterables (as produced by the detection modules /
    ``DataFrame.to_dict(orient='list')``).  Every column value is forced into
    a concrete ``list`` so downstream report code can consume the tables
    uniformly.  Conversion is idempotent (``list(list(x)) == list(x)``).
    """
    # NOTE: the names below are module-level globals populated by the
    # detection phase; ``global`` is kept for clarity even though this
    # function only mutates the dicts in place.
    global timestart, timeend, User_SIDs, SMB_Server_events, SMB_Client_events, \
        TerminalServices_RDPClient_events, Executed_Process_Events, Group_Policy_events, \
        Object_Access_Events, input_timezone, Logon_Events, Executed_Process_Summary, \
        TerminalServices_Summary, Security_Authentication_Summary, Sysmon_events, \
        WinRM_events, Security_events, System_events, ScheduledTask_events, \
        Powershell_events, Powershell_Operational_events, TerminalServices_events, \
        Windows_Defender_events, Timesketch_events, Executed_Powershell_Summary
    # The original list included TerminalServices_Summary and
    # Security_Authentication_Summary twice; the duplicates are dropped here —
    # re-converting the same dict is idempotent, so behavior is unchanged and
    # redundant work is avoided.
    results = [
        Executed_Powershell_Summary,
        SMB_Server_events,
        User_SIDs,
        SMB_Client_events,
        TerminalServices_RDPClient_events,
        Executed_Process_Events,
        Group_Policy_events,
        Object_Access_Events,
        Logon_Events,
        Executed_Process_Summary,
        TerminalServices_Summary,
        Security_Authentication_Summary,
        Sysmon_events,
        WinRM_events,
        Security_events,
        System_events,
        ScheduledTask_events,
        Powershell_events,
        Powershell_Operational_events,
        TerminalServices_events,
        Windows_Defender_events,
    ]
    for result in results:
        table = result[0]
        for column in table:
            # Materialize each column's values as a plain list, in place.
            table[column] = list(table[column])
def resolveSID():
    """Resolve the SIDs in WinRM and RDP-client events to user names.

    Reloads the WinRM and TerminalServices RDP-client event tables from the
    per-run temp CSV reports when those files exist, then maps each event's
    ``UserID`` (a SID string) to a user name using the collected ``User_SIDs``
    table.  SIDs with no match resolve to the literal string
    ``"Could not be resolved"``.  Results are published in the globals
    ``RDPClient_Resolved_User`` and ``WinRM_Resolved_User``, each parallel to
    its event table's ``UserID`` column.

    Assumes ``User_SIDs`` is a mapping with parallel "SID" and "User"
    sequences (as built by the detection phase) — TODO confirm against caller.
    """
    global TerminalServices_RDPClient_events, WinRM_events, User_SIDs, \
        RDPClient_Resolved_User, WinRM_Resolved_User
    # Refresh the event tables from the temp-dir reports when available.
    if os.path.exists(temp_dir + "_WinRM_events_report.csv"):
        WinRM_events[0] = pd.DataFrame(
            pd.read_csv(temp_dir + "_WinRM_events_report.csv")
        ).to_dict(orient='list')
    if os.path.exists(temp_dir + "_TerminalServices_RDPClient_report.csv"):
        TerminalServices_RDPClient_events[0] = pd.DataFrame(
            pd.read_csv(temp_dir + "_TerminalServices_RDPClient_report.csv")
        ).to_dict(orient='list')
    # Build the SID -> user lookup ONCE.  The original code called
    # User_SIDs["SID"].index(SID) inside the loops (O(n) per event, O(n^2)
    # overall); a dict gives O(1) lookups.  setdefault keeps the FIRST
    # occurrence of a duplicated SID, matching list.index() semantics.
    sid_to_user = {}
    for sid, user in zip(User_SIDs["SID"], User_SIDs["User"]):
        sid_to_user.setdefault(sid, user)

    def _resolve(sids):
        # One resolved name per event; unknown SIDs get the sentinel string.
        return [sid_to_user.get(sid, "Could not be resolved") for sid in sids]

    RDPClient_Resolved_User = _resolve(TerminalServices_RDPClient_events[0]["UserID"])
    WinRM_Resolved_User = _resolve(WinRM_events[0]["UserID"])
    #print("user sid"+str(User_SIDs["SID"]))
    #print("RDPCLient : "+str(RDPClient_Resolved_User))
    #print("WinRM : " + str(WinRM_Resolved_User))
#创建临时路径
def create_temp_dir():
global temp_dir
@ -870,7 +719,6 @@ def create_temp_dir():
else:
print(f"{temp_dir} already exists")
#创建输出路径
def create_out_dir(output):
global temp_dir
@ -885,7 +733,6 @@ def create_out_dir(output):
return output+"/"+output
#清除临时路径
def clean_temp_dir():
global temp_dir
if os.path.exists(temp_dir):
@ -896,13 +743,10 @@ def clean_temp_dir():
os.rmdir(os.path.join(root, name))
os.rmdir(temp_dir)
def main():
# 记录程序开始执行的时间
tic = time.time()
print(Banner)
global CPU_Core,timestart,timeend,Output,objectaccess,Path,processexec,logons,frequencyanalysis,Security_path,system_path,scheduledtask_path,defender_path,powershell_path,powershellop_path,terminal_path,winrm_path,sysmon_path,input_timezone,objectaccess,processexec,logons,frequencyanalysis,allreport
# 创建命令行参数解析器
parser = argparse.ArgumentParser()
parser.add_argument("-p","--path", help="path to folder containing windows event logs , APT-Hunter will detect each log type automatically")
parser.add_argument("-o", "--out",help="output file name")
@ -923,12 +767,10 @@ def main():
parser.add_argument("-rules","--rules", help="path to sigma rules in json format")
#parser.add_argument("-evtfreq","--evtfreq", help="Produce event ID frequency analysis report",action='store_true')
parser.add_argument("-cores","--cores", help="cpu cores to be used in multiprocessing , default is half the number of availble CPU cores")
# 解析命令行参数
args = parser.parse_args()
# 如果指定了输出文件名,则创建输出目录
if args.out is not None:
Output=create_out_dir(args.out)
# 如果没有指定日志路径,则打印错误信息并退出
if (args.path is None ):# and args.security is None and args.system is None and args.scheduledtask is None and args.defender is None and args.powershell is None and args.powershellop is None and args.terminal is None and args.winrm is None and args.sysmon is None):
print("You didn't specify a path for the logs \nuse --help to print help message")
exit()
@ -945,7 +787,6 @@ def main():
allreport=args.allreport
CPU_Core=0
#print(f"all reports value : {allreport}\nlogons value {logons}")
# 尝试解析开始和结束时间,如果格式不正确,则打印错误信息并退出
try:
if args.start is not None and args.end is not None:
timestart=datetime.timestamp(dateutil.parser.isoparse(args.start))
@ -953,20 +794,18 @@ def main():
except:
print("Error parsing time , please use ISO format with timestart and timeend Ex: (2022-04-03T20:56+04:00 or 2022-04-03T20:56 or 2022-04-03 20:56 or 2022-04-03)")
exit()
# 根据命令行参数设置时区
if args.timezone is not None:
if args.timezone.lower()=="local":
input_timezone=tz.tzlocal()
else:
input_timezone=timezone(args.timezone)
# 根据命令行参数设置CPU核心数如果格式不正确则打印错误信息并退出
if args.cores is not None:
try:
CPU_Core=int(args.cores)
except:
print(f"Error using supplied CPU cores {args.cores}")
exit(0)
# 如果启用了Sigma模块则执行Sigma分析
if args.sigma is not False:
if args.rules is not None:
SigmaHunter.Sigma_Analyze(Path,args.rules,Output)
@ -975,7 +814,6 @@ def main():
toc = time.time()
print('Done in {:.4f} seconds'.format(toc-tic))
return
# 如果指定了搜索字符串或正则表达式,则执行威胁搜索
if args.hunt is not None:
if args.eid is not None:
threat_hunt(Path,args.hunt,args.eid,None)
@ -984,7 +822,6 @@ def main():
toc = time.time()
print('Done in {:.4f} seconds'.format(toc-tic))
return
# 如果启用了Office 365日志搜索则执行Office 365分析
if args.o365hunt is not False:
if args.o365rules is not None:
O365Hunter.analyzeoff365(Path, args.o365rules,Output,input_timezone,args.o365raw)
@ -993,7 +830,6 @@ def main():
#toc = time.time()
#print('Done in {:.4f} seconds'.format(toc-tic))
return
# 如果指定了搜索文件,则根据文件中的字符串或正则表达式执行威胁搜索
if args.hunt is None and args.huntfile is not None:
if args.eid is not None:
threat_hunt(Path,None,args.eid,args.huntfile)
@ -1003,7 +839,7 @@ def main():
print('Done in {:.4f} seconds'.format(toc-tic))
return
# 如果没有指定特定的搜索或分析类型,则自动检测日志类型并生成报告
#if args.type is None or args.type=="evtx":
try:
create_temp_dir()
@ -1016,7 +852,6 @@ def main():
clean_temp_dir()
toc = time.time()
# 打印程序执行完毕的时间
print('Analysis finished in {:.4f} seconds'.format(toc-tic))
return

@ -58,7 +58,6 @@ Timesketch_events=[{'message':[],'timestamp':[],'datetime':[],'timestamp_desc':[
def evtxdetect():
#指定提取各种evtx日志文件
global input_timezone,Logon_Events,Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,Timesketch_events,TerminalServices_Summary,Security_Authentication_Summary
try:
print(Security_path)
@ -153,7 +152,6 @@ def evtxdetect():
Logon_Events =EvtxDetection.Logon_Events
def csvdetect(winevent):
#指定提取各种csv日志文件
global Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,Timesketch_events,TerminalServices_Summary,Security_Authentication_Summary
try:
#print(Security_path,winevent)
@ -247,7 +245,6 @@ def csvdetect(winevent):
Security_Authentication_Summary =CSVDetection.Security_Authentication_Summary
def evtxdetect_auto():
#自动提取各种evtx日志文件
global input_timezone,Logon_Events,Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,Timesketch_events,TerminalServices_Summary,Security_Authentication_Summary
try:
#print(Security_path)
@ -343,7 +340,6 @@ def evtxdetect_auto():
def auto_detect(path):
#自动提取所有日志文件
global input_timezone
EventID_rex = re.compile('<EventID.*>(.*)<\/EventID>', re.IGNORECASE)
Channel_rex = re.compile('<Channel.*>(.*)<\/Channel>', re.IGNORECASE)
@ -402,7 +398,6 @@ def auto_detect(path):
print("issue assigning path")
evtxdetect_auto()
def threat_hunt(path,str_regex):
#威胁分析
global input_timezone, Output
import os
@ -419,7 +414,6 @@ def threat_hunt(path,str_regex):
EvtxHunt.Evtx_hunt(files,str_regex,input_timezone,Output)
def report():
#报告生成
global Output
timesketch=Output+"_TimeSketch.csv"
Report=Output+"_Report.xlsx"
@ -473,7 +467,6 @@ def report():
def main():
print(Banner)
global Output,Path,Security_path,system_path,scheduledtask_path,defender_path,powershell_path,powershellop_path,terminal_path,winrm_path,sysmon_path,input_timezone
# 创建命令行参数解析器
parser = argparse.ArgumentParser()
parser.add_argument("-p","--path", help="path to folder containing windows event logs generated by the powershell log collector")
parser.add_argument("-o", "--out",
@ -559,4 +552,6 @@ def main():
if args.type=="csv":
csvdetect(True)
report()
main()

@ -1,24 +1,15 @@
#!/bin/bash
# Convert a directory of Sigma rules into rules.json for the analyzer.
#
# Usage: ./<script> <path-to-sigma-rules-directory>

# Require exactly one argument: the path to the Sigma rules directory.
if [ "$#" -ne 1 ]; then
    echo "Please enter rules path as argument "
    exit 1
fi

# Fetch the legacy Sigma converter (provides the sigmac tool).
echo "Getting Sigma Converter Tool"
git clone https://github.com/SigmaHQ/legacy-sigmatools.git

# Convert the rules:
#   --recurse                      process the rules directory recursively
#   --target sqlite                emit SQLite-compatible queries
#   --backend-option table=Events  query against the 'Events' table
#   -d "$1"                        rules directory (quoted so paths with spaces work)
#   -c ...yml                      field-mapping configuration file
#   -o rules.json                  output file name
#   --output-fields ...            rule metadata fields to keep in the output
echo "Converting sigma rules "
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d "$1" -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status

# Report where the converted rules landed.
echo "Rules created with file name : rules.json "

@ -1,23 +1,11 @@
#!/bin/bash
# Download the public SigmaHQ Windows rules and convert them into rules.json
# for the analyzer. No arguments required.

# Fetch the legacy Sigma converter (provides the sigmac tool).
echo "Getting Sigma Converter Tool"
git clone https://github.com/SigmaHQ/legacy-sigmatools.git

# Fetch the SigmaHQ rule repository (we convert its Windows rules).
echo "Getting Sigma Rules"
git clone https://github.com/SigmaHQ/sigma.git

# Convert the rules:
#   --recurse                      process the rules directory recursively
#   --target sqlite                emit SQLite-compatible queries
#   --backend-option table=Events  query against the 'Events' table
#   -d sigma/rules/windows/        Windows rules from the cloned sigma repo
#   -c ...yml                      field-mapping configuration file
#   -o rules.json                  output file name
#   --output-fields ...            rule metadata fields to keep in the output
echo "Converting sigma rules "
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d sigma/rules/windows/ -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status

# Report where the converted rules landed.
echo "Rules created with file name : rules.json "

@ -1,100 +1,99 @@
{
"description": "此 JSON 文件包含与 O365 安全检测相关的规则,每条规则包括名称、严重性等级和查询语句。",
"rules": [
{
"name": "Suspicious User Agent",
"severity": "High",
"query": "SELECT * FROM events WHERE UserAgent LIKE '%python%' OR UserAgent LIKE '%ruler%' OR UserAgent LIKE '%curl%' OR UserAgent LIKE '%Wget%' OR UserAgent LIKE '%python-requests%' OR UserAgent LIKE '%AADInternals%' OR UserAgent LIKE '%azurehound%' OR UserAgent LIKE '%axios%' OR UserAgent LIKE '%BAV2ROPC%' "
},
{
"name": "User adding or removing Inbox Rule",
"severity": "Medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%InboxRule%' OR Operation LIKE 'Set-Mailbox' OR Operation LIKE '%DeliverToMailboxAndForward%' OR Operation LIKE '%ForwardingAddress%' OR Operation LIKE '%ForwardingAddress%' "
},
{
"name": "After Hours Activity",
"severity": "Medium",
"query": "SELECT * FROM events WHERE (CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END >= 20 OR CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END < 6) AND NOT (Operation LIKE 'File%' OR Operation LIKE 'List%' OR Operation LIKE 'Page%' OR Operation LIKE '%UserLogin%');"
},
{
"name": "Possible file exfiltration",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%FileUploaded%' "
},
{
"name": "Admin searching in emails of other users",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%SearchStarted%' OR Operation LIKE '%SearchExportDownloaded%' OR Operation LIKE '%ViewedSearchExported%' "
},
{
"name": "Strong Authentication Disabled",
"severity": "medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%disable strong authentication%'"
},
{
"name": "User added to admin group",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add member to group%' AND ModifiedProperties Like '%admin%') OR ( Operation LIKE '%AddedToGroup%' AND TargetUserOrGroupName Like '%admin%') "
},
{
"name": "New Policy created",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add policy%' ) "
},
{
"name": "Security Alert triggered",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AlertTriggered%' AND NOT Severity Like '%Low%') "
},
{
"name": "Transport rules ( mail flow rules ) modified",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%TransportRule%') "
},
{
"name": "An application was registered in Azure AD",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add service principal.%') "
},
{
"name": "Add app role assignment grant to user",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add app role assignment grant to user.%') "
},
{
"name": "eDiscovery Abuse",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%New-ComplianceSearch%') "
},
{
"name": "Operations affecting OAuth Applications",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Add application.' OR Operation = 'Update application' OR Operation = 'Add service principal.' OR Operation = 'Update application Certificates and secrets management' OR Operation = 'Update applicationUpdate service principal.' OR Operation = 'Add app role assignment grant to user.' OR Operation = 'Add delegated permission grant.' OR Operation = 'Add owner to application.' OR Operation = 'Add owner to service principal.') "
},
{
"name": "Suspicious Operations affecting Mailbox ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Set-MailboxJunkEmailConfiguration' OR Operation = 'SoftDelete' OR Operation = 'SendAs' OR Operation = 'HardDelete' OR Operation = 'MoveToDeletedItems' ) "
},
{
"name": "Suspicious Operations affecting SharePoint ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'AddedToSecureLink' OR Operation = 'SearchQueryPerformed' OR Operation = 'SecureLinkCreated' OR Operation = 'SecureLinkUpdated' OR Operation = 'SharingInvitationCreated' ) "
},
{
"name": "User Modifying RetentionPolicy ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%UnifiedAuditLogRetentionPolicy%' ) "
},
{
"name": "User Modifying Audit Logging ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AdminAuditLogConfig%' ) "
},
{
"name": "String Authentication Disabled ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Disable Strong Authentication.%' ) "
}
]
}
[
{
"name": "Suspicious User Agent",
"severity": "High",
"query": "SELECT * FROM events WHERE UserAgent LIKE '%python%' OR UserAgent LIKE '%ruler%' OR UserAgent LIKE '%curl%' OR UserAgent LIKE '%Wget%' OR UserAgent LIKE '%python-requests%' OR UserAgent LIKE '%AADInternals%' OR UserAgent LIKE '%azurehound%' OR UserAgent LIKE '%axios%' OR UserAgent LIKE '%BAV2ROPC%' "
},
{
"name": "User adding or removing Inbox Rule",
"severity": "Medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%InboxRule%' OR Operation LIKE 'Set-Mailbox' OR Operation LIKE '%DeliverToMailboxAndForward%' OR Operation LIKE '%ForwardingAddress%' OR Operation LIKE '%ForwardingAddress%' "
},
{
"name": "After Hours Activity",
"severity": "Medium",
"query": "SELECT * FROM events WHERE (CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END >= 20 OR CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END < 6) AND NOT (Operation LIKE 'File%' OR Operation LIKE 'List%' OR Operation LIKE 'Page%' OR Operation LIKE '%UserLogin%');"
},
{
"name": "Possible file exfiltration",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%FileUploaded%' "
},
{
"name": "Admin searching in emails of other users",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%SearchStarted%' OR Operation LIKE '%SearchExportDownloaded%' OR Operation LIKE '%ViewedSearchExported%' "
},
{
"name": "Strong Authentication Disabled",
"severity": "medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%disable strong authentication%'"
},
{
"name": "User added to admin group",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add member to group%' AND ModifiedProperties Like '%admin%') OR ( Operation LIKE '%AddedToGroup%' AND TargetUserOrGroupName Like '%admin%') "
},
{
"name": "New Policy created",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add policy%' ) "
},
{
"name": "Security Alert triggered",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AlertTriggered%' AND NOT Severity Like '%Low%') "
},
{
"name": "Transport rules ( mail flow rules ) modified",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%TransportRule%') "
},
{
"name": "An application was registered in Azure AD",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add service principal.%') "
},
{
"name": "Add app role assignment grant to user",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add app role assignment grant to user.%') "
},
{
"name": "eDiscovery Abuse",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%New-ComplianceSearch%') "
},
{
"name": "Operations affecting OAuth Applications",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Add application.' OR Operation = 'Update application' OR Operation = 'Add service principal.' OR Operation = 'Update application Certificates and secrets management' OR Operation = 'Update applicationUpdate service principal.' OR Operation = 'Add app role assignment grant to user.' OR Operation = 'Add delegated permission grant.' OR Operation = 'Add owner to application.' OR Operation = 'Add owner to service principal.') "
},
{
"name": "Suspicious Operations affecting Mailbox ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Set-MailboxJunkEmailConfiguration' OR Operation = 'SoftDelete' OR Operation = 'SendAs' OR Operation = 'HardDelete' OR Operation = 'MoveToDeletedItems' ) "
},
{
"name": "Suspicious Operations affecting SharePoint ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'AddedToSecureLink' OR Operation = 'SearchQueryPerformed' OR Operation = 'SecureLinkCreated' OR Operation = 'SecureLinkUpdated' OR Operation = 'SharingInvitationCreated' ) "
},
{
"name": "User Modifying RetentionPolicy ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%UnifiedAuditLogRetentionPolicy%' ) "
},
{
"name": "User Modifying Audit Logging ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AdminAuditLogConfig%' ) "
},
{
"name": "String Authentication Disabled ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Disable Strong Authentication.%' ) "
}
]

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

@ -1,13 +1,13 @@
import json # 导入用于处理JSON数据的模块
import sqlite3 # 导入用于操作SQLite数据库的模块
import tempfile # 导入用于创建临时文件和目录的模块
import os # 导入用于操作系统功能的模块
import time # 导入用于处理时间相关功能的模块
import pandas as pd # 导入用于数据处理和分析的Pandas库
import geoip2.database # 导入用于GeoLite2数据库的GeoIP2库
import requests # 导入用于发送HTTP请求的模块
from dateutil import parser, tz # 导入用于解析和处理日期时间的模块
from pathlib import Path # 导入用于处理文件路径的模块
import json
import sqlite3
import tempfile
import os
import time
import pandas as pd
import geoip2.database
import requests
from dateutil import parser, tz
from pathlib import Path
# 初始化全局变量用于计时
start_time = 0
@ -103,22 +103,13 @@ def convert_csv(input_file, temp):
返回:
- json_file: 生成的JSON文件路径
"""
# 创建一个新的JSON文件路径结合临时目录和文件名
json_file = os.path.join(temp, 'audit_data.json')
# 同时打开输入的CSV文件进行读取和新的JSON文件进行写入设置编码为UTF-8
# 使用上下文管理器确保文件正确关闭
with open(input_file, 'r', encoding='utf-8') as csv_file, open(json_file, 'w', encoding='utf-8') as jsonl_file:
# 使用csv.DictReader来读取CSV文件每行会转换为字典
reader = csv.DictReader(csv_file)
# 迭代读取CSV文件的每一行
for row in reader:
# 将CSV文件中'AuditData'字段的字符串解析为JSON对象
json_data = json.loads(row['AuditData'])
# 将JSON对象再次转换为字符串
json_string = json.dumps(json_data)
# 将转换后的JSON字符串写入json文件每行一个JSON对象以换行符结束
jsonl_file.write(json_string + '\n')
# 返回新创建的JSON文件的路径
return json_file
def flatten_json_file(input_file, timezone, chunk_size=10000):
@ -133,30 +124,18 @@ def flatten_json_file(input_file, timezone, chunk_size=10000):
返回:
- DataFrame: 展平后的数据
"""
# 初始化一个空列表用于存储数据块
chunks = []
# 打开输入的JSON文件进行读取
with open(input_file, 'r') as file:
# 读取所有行到一个列表中
lines = file.readlines()
# 按块大小迭代处理行
for i in range(0, len(lines), chunk_size):
# 将当前块的每一行解析为JSON对象
chunk = [json.loads(line) for line in lines[i:i + chunk_size]]
# 处理每个记录
for record in chunk:
# 如果记录中包含'CreationTime'字段
if 'CreationTime' in record:
# 解析'CreationTime'字段为日期时间对象
creation_time = parser.parse(record['CreationTime'])
# 如果日期时间对象没有时区信息设置为UTC
if creation_time.tzinfo is None:
creation_time = creation_time.replace(tzinfo=tz.tzutc())
# 将日期时间对象转换为目标时区并格式化为ISO格式字符串
record['CreationTime'] = creation_time.astimezone(timezone).isoformat()
# 将当前块展平并添加到数据块列表中
chunks.append(pd.json_normalize(chunk))
# 合并所有数据块为一个DataFrame并返回
return pd.concat(chunks, ignore_index=True)
def create_sqlite_db_from_dataframe(dataframe, db_name):
@ -167,14 +146,10 @@ def create_sqlite_db_from_dataframe(dataframe, db_name):
- dataframe: 包含数据的Pandas DataFrame
- db_name: SQLite数据库文件名
"""
# 连接到SQLite数据库如果数据库不存在则会创建
conn = sqlite3.connect(db_name)
# 将DataFrame中的所有列转换为字符串类型
dataframe = dataframe.astype(str)
# 将DataFrame写入SQLite数据库中的'table'表,如果表已存在则替换
dataframe.to_sql('events', conn, if_exists='replace', index=False,
dtype={col_name: 'TEXT' for col_name in dataframe.columns})
# 关闭数据库连接
conn.close()
def read_detection_rules(rule_file):
@ -201,29 +176,17 @@ def apply_detection_logic_sqlite(db_name, rules):
返回:
- DataFrame: 检测到的异常事件
"""
# 连接到SQLite数据库
conn = sqlite3.connect(db_name)
# 初始化一个空列表用于存储所有检测到的事件
all_detected_events = []
# 遍历每个检测规则
for rule in rules:
# 获取规则名称
rule_name = rule['name']
# 获取规则严重性
severity = rule['severity']
# 获取规则的SQL查询
query = rule['query']
# 执行SQL查询并将结果存储到DataFrame中
detected_events = pd.read_sql_query(query, conn)
# 添加规则名称列到DataFrame
detected_events['RuleName'] = rule_name
# 添加严重性列到DataFrame
detected_events['Severity'] = severity
# 将当前规则检测到的事件添加到列表中
all_detected_events.append(detected_events)
# 关闭数据库连接
conn.close()
# 合并所有检测到的事件为一个DataFrame并返回如果没有检测到事件则返回空DataFrame
return pd.concat(all_detected_events, ignore_index=True) if all_detected_events else pd.DataFrame()
def download_geolite_db(geolite_db_path):
@ -271,22 +234,26 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
- include_flattened_data: 是否包含展平后的数据
- geolite_db_path: GeoLite2数据库文件路径
"""
global start_time, end_time # 声明全局变量start_time和end_time
start_time = time.time() # 记录开始时间
temp_dir = ".temp" # 设置临时目录路径
if output is None or output == "": # 如果输出目录未指定或为空
output = os.path.splitext(auditfile)[0] # 使用审计文件的基础名称作为输出目录
global start_time, end_time
start_time = time.time()
temp_dir = ".temp"
if output is None or output == "":
output = os.path.splitext(auditfile)[0]
try:
os.makedirs(output, exist_ok=True) # 创建输出目录,如果不存在则创建
os.makedirs(temp_dir, exist_ok=True) # 创建临时目录,如果不存在则创建
if not os.path.exists(geolite_db_path): # 如果GeoLite2数据库文件不存在
download_geolite_db(geolite_db_path) # 下载GeoLite2数据库
json_file = convert_csv(auditfile, temp_dir) # 将CSV文件转换为JSON文件
input_file = json_file # 设置输入文件路径为转换后的JSON文件
db_name = os.path.join(temp_dir, 'audit_data.db') # 设置SQLite数据库文件路径
if rule_file is None: # 如果规则文件未指定
rule_file = 'O365_detection_rules.json' # 使用默认的规则文件名
output_file = f"{output}_o365_report.xlsx" # 设置输出的Excel报告文件路径
os.makedirs(output, exist_ok=True)
os.makedirs(temp_dir, exist_ok=True)
if not os.path.exists(geolite_db_path):
download_geolite_db(geolite_db_path)
json_file = convert_csv(auditfile, temp_dir)
input_file = json_file
db_name = os.path.join(temp_dir, 'audit_data.db')
if rule_file is None:
rule_file = 'O365_detection_rules.json'
output_file = f"{output}_o365_report.xlsx"
# 展平JSON数据并处理时间戳
flattened_df = flatten_json_file(input_file, timezone)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 236 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 70 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 222 KiB

@ -1,107 +1,101 @@
# 尝试创建一个名为 "wineventlog" 的目录
try {
New-Item -ItemType "directory" -Path "wineventlog"
try{
New-Item -ItemType "directory" -Path "wineventlog"
}
catch {
# 如果创建目录失败,输出错误信息
echo "can't create a new directory"
catch
{
echo "can't create a new directory"
}
# 尝试获取安全日志并导出为 CSV 文件
try {
get-eventlog -log Security | export-csv wineventlog/Security.csv
try{
get-eventlog -log Security | export-csv wineventlog/Security.csv
}
catch {
# 如果获取安全日志失败,输出错误信息
echo "Can't retrieve Security Logs"
catch
{
echo "Can't retrieve Security Logs"
}
# 尝试获取系统日志并导出为 CSV 文件
try {
Get-WinEvent -LogName System | export-csv wineventlog/System.csv
try
{
Get-WinEvent -LogName System | export-csv wineventlog/System.csv
}
catch {
# 如果获取系统日志失败,输出错误信息
echo "Can't retrieve System Logs"
catch
{
echo "Can't retrieve System Logs"
}
# 尝试获取应用程序日志并导出为 CSV 文件
try {
Get-WinEvent -LogName Application | export-csv wineventlog/Application.csv
try{
Get-WinEvent -LogName Application | export-csv wineventlog/Application.csv
}
catch {
# 如果获取应用程序日志失败,输出错误信息
echo "Can't retrieve Application Logs"
catch
{
echo "Can't retrieve Application Logs"
}
# 尝试获取 Windows PowerShell 日志并导出为 CSV 文件
try {
Get-WinEvent -LogName "Windows PowerShell" | export-csv wineventlog/Windows_PowerShell.csv
try{
Get-WinEvent -LogName "Windows PowerShell" | export-csv wineventlog/Windows_PowerShell.csv
}
catch {
# 如果获取 Windows PowerShell 日志失败,输出错误信息
echo "Can't retrieve Windows PowerShell Logs"
catch
{
echo "Can't retrieve Windows PowerShell Logs"
}
# 尝试获取 Microsoft-Windows-TerminalServices-LocalSessionManager/Operational 日志并导出为 CSV 文件
try {
Get-WinEvent -LogName "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" | export-csv wineventlog/LocalSessionManager.csv
try{
Get-WinEvent -LogName "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" | export-csv wineventlog/LocalSessionManager.csv
}
catch {
# 如果获取 LocalSessionManager 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
}
# 尝试获取 Microsoft-Windows-Windows Defender/Operational 日志并导出为 CSV 文件
try {
Get-WinEvent -LogName "Microsoft-Windows-Windows Defender/Operational" | export-csv wineventlog/Windows_Defender.csv
try{
Get-WinEvent -LogName "Microsoft-Windows-Windows Defender/Operational" | export-csv wineventlog/Windows_Defender.csv
}
catch {
# 如果获取 Windows Defender 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
}
# 尝试获取 Microsoft-Windows-TaskScheduler/Operational 日志并导出为 CSV 文件
try {
Get-WinEvent -LogName Microsoft-Windows-TaskScheduler/Operational | export-csv wineventlog/TaskScheduler.csv
try{
Get-WinEvent -LogName Microsoft-Windows-TaskScheduler/Operational | export-csv wineventlog/TaskScheduler.csv
}
catch {
# 如果获取 TaskScheduler 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
}
# 尝试获取 Microsoft-Windows-WinRM/Operational 日志并导出为 CSV 文件
try {
Get-WinEvent -LogName Microsoft-Windows-WinRM/Operational | export-csv wineventlog/WinRM.csv
try{
Get-WinEvent -LogName Microsoft-Windows-WinRM/Operational | export-csv wineventlog/WinRM.csv
}
catch {
# 如果获取 WinRM 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
}
# 尝试获取 Microsoft-Windows-Sysmon/Operational 日志并导出为 CSV 文件
try {
Get-WinEvent -LogName Microsoft-Windows-Sysmon/Operational | export-csv wineventlog/Sysmon.csv
try{
Get-WinEvent -LogName Microsoft-Windows-Sysmon/Operational | export-csv wineventlog/Sysmon.csv
}
catch {
# 如果获取 Sysmon 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
}
# 尝试获取 Microsoft-Windows-PowerShell/Operational 日志并导出为 CSV 文件
try {
Get-WinEvent -LogName Microsoft-Windows-PowerShell/Operational | export-csv wineventlog/Powershell_Operational.csv
try{
Get-WinEvent -LogName Microsoft-Windows-PowerShell/Operational | export-csv wineventlog/Powershell_Operational.csv
}
catch {
# 如果获取 PowerShell Operational 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
}
# 尝试压缩 "wineventlog" 目录为 logs.zip
try {
Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
try
{
Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
}
catch {
# 如果压缩失败,输出错误信息
echo "couldn't compress the log folder"
catch
{
echo "couldn't compress the the log folder "
}

@ -1,107 +1,101 @@
# 尝试创建一个名为 "wineventlog" 的目录
try {
New-Item -ItemType "directory" -Path "wineventlog"
try{
New-Item -ItemType "directory" -Path "wineventlog"
}
catch {
# 如果创建目录失败,输出错误信息
echo "can't create a new directory"
catch
{
echo "can't create a new directory"
}
# 尝试导出安全日志到指定的 EVTX 文件
try {
wevtutil epl Security wineventlog/Security.evtx
try{
wevtutil epl Security wineventlog/Security.evtx
}
catch {
# 如果导出安全日志失败,输出错误信息
echo "Can't retrieve Security Logs"
catch
{
echo "Can't retrieve Security Logs"
}
# 尝试导出系统日志到指定的 EVTX 文件
try {
wevtutil epl System wineventlog/System.evtx
try
{
wevtutil epl System wineventlog/System.evtx
}
catch {
# 如果导出系统日志失败,输出错误信息
echo "Can't retrieve System Logs"
catch
{
echo "Can't retrieve System Logs"
}
# 尝试导出应用程序日志到指定的 EVTX 文件
try {
wevtutil epl Application wineventlog/Application.evtx
try{
wevtutil epl Application wineventlog/Application.evtx
}
catch {
# 如果导出应用程序日志失败,输出错误信息
echo "Can't retrieve Application Logs"
catch
{
echo "Can't retrieve Application Logs"
}
# 尝试导出 Windows PowerShell 日志到指定的 EVTX 文件
try {
wevtutil epl "Windows PowerShell" wineventlog/Windows_PowerShell.evtx
try{
wevtutil epl "Windows PowerShell" wineventlog/Windows_PowerShell.evtx
}
catch {
# 如果导出 Windows PowerShell 日志失败,输出错误信息
echo "Can't retrieve Windows PowerShell Logs"
catch
{
echo "Can't retrieve Windows PowerShell Logs"
}
# 尝试导出 Microsoft-Windows-TerminalServices-LocalSessionManager/Operational 日志到指定的 EVTX 文件
try {
wevtutil epl "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" wineventlog/LocalSessionManager.evtx
try{
wevtutil epl "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" wineventlog/LocalSessionManager.evtx
}
catch {
# 如果导出 LocalSessionManager 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
}
# 尝试导出 Microsoft-Windows-Windows Defender/Operational 日志到指定的 EVTX 文件
try {
wevtutil epl "Microsoft-Windows-Windows Defender/Operational" wineventlog/Windows_Defender.evtx
try{
wevtutil epl "Microsoft-Windows-Windows Defender/Operational" wineventlog/Windows_Defender.evtx
}
catch {
# 如果导出 Windows Defender 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
}
# 尝试导出 Microsoft-Windows-TaskScheduler/Operational 日志到指定的 EVTX 文件
try {
wevtutil epl Microsoft-Windows-TaskScheduler/Operational wineventlog/TaskScheduler.evtx
try{
wevtutil epl Microsoft-Windows-TaskScheduler/Operational wineventlog/TaskScheduler.evtx
}
catch {
# 如果导出 TaskScheduler 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
}
# 尝试导出 Microsoft-Windows-WinRM/Operational 日志到指定的 EVTX 文件
try {
wevtutil epl Microsoft-Windows-WinRM/Operational wineventlog/WinRM.evtx
try{
wevtutil epl Microsoft-Windows-WinRM/Operational wineventlog/WinRM.evtx
}
catch {
# 如果导出 WinRM 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
}
# 尝试导出 Microsoft-Windows-Sysmon/Operational 日志到指定的 EVTX 文件
try {
wevtutil epl Microsoft-Windows-Sysmon/Operational wineventlog/Sysmon.evtx
try{
wevtutil epl Microsoft-Windows-Sysmon/Operational wineventlog/Sysmon.evtx
}
catch {
# 如果导出 Sysmon 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
}
# 尝试导出 Microsoft-Windows-PowerShell/Operational 日志到指定的 EVTX 文件
try {
wevtutil epl Microsoft-Windows-PowerShell/Operational wineventlog/Powershell_Operational.evtx
try{
wevtutil epl Microsoft-Windows-PowerShell/Operational wineventlog/Powershell_Operational.evtx
}
catch {
# 如果导出 PowerShell Operational 日志失败,输出错误信息
echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
catch
{
echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
}
# 尝试压缩 "wineventlog" 目录为 logs.zip
try {
Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
}
catch {
# 如果压缩失败,输出错误信息
echo "couldn't compress the log folder"
}
try
{
Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
}
catch
{
echo "couldn't compress the the log folder "
}

Loading…
Cancel
Save