Cleared out all the code files again

main
lancymorry 7 months ago
parent 58eb17b849
commit aadd24d025

@ -103,48 +103,15 @@ def evtxdetect_auto():
try:
#print(Security_path)
# Create a multiprocessing object to analyze the UserProfile service logs in parallel
# target is the function the new process will run; args passes the parameters that function needs
userprofile = multiprocessing.Process(
target=EvtxDetection.multiprocess, # function to execute in the new process
args=( # parameters passed to that function
UserProfile_path_list, # list of UserProfile log paths
EvtxDetection.detect_events_UserProfileService_log, # event-detection function
input_timezone, # input timezone
timestart, # analysis start time
timeend, # analysis end time
objectaccess, # whether to analyze object-access events
processexec, # whether to analyze process-execution events
logons, # whether to analyze logon events
frequencyanalysis, # whether to run frequency analysis
allreport, # whether to produce every report
Output, # output path or related configuration
CPU_Core # number of CPU cores to use
)
)#spawn a worker process to analyze the Windows logs; the arguments include the UserProfile path list, an event-detection function, the timezone, the time range, and the analysis switches (object access, process execution, logons, frequency analysis, ...)
# start the new process, which begins executing the multiprocess function
userprofile=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (UserProfile_path_list,EvtxDetection.detect_events_UserProfileService_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core))
userprofile.start()
# append the new process object to process_list so it can be tracked and joined later
process_list.append(userprofile)
# runs if the try block raised an IOError (e.g. the file does not exist)
except IOError :
# report that analyzing the User Profile logs failed because the file path does not exist
print("Error Analyzing User Profile logs: ", end='')
print("File Path Does Not Exist")
# runs if the try block raised any exception other than IOError
except Exception as e:
# report that analyzing the User Profile logs failed
print("Error Analyzing User Profile logs")
# log the exception details, including the stack trace, via the logging module
# traceback.format_exc() returns the exception's stack trace as a string
logging.error(traceback.format_exc())
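# --- illustrative sketch (not part of the original file) ---------------------
# The spawn/track/join pattern this function repeats for every log channel,
# reduced to a runnable minimum; _analyze is a hypothetical stand-in for
# EvtxDetection.multiprocess and the paths are made up.
import multiprocessing

def _analyze(paths, tag):
    print(f"{tag}: analyzing {len(paths)} file(s)")

if __name__ == "__main__":
    _workers = []
    for _tag, _paths in {"Security": ["a.evtx"], "System": ["b.evtx"]}.items():
        _p = multiprocessing.Process(target=_analyze, args=(_paths, _tag))
        _p.start()             # spawn one worker per log channel
        _workers.append(_p)    # keep a handle so it can be joined later
    for _p in _workers:
        _p.join()              # wait for every worker to finish
# -----------------------------------------------------------------------------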
#like the previous try block, but analyzes the Security logs in parallel
try:
#print(Security_path)
sec=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (Security_path_list,EvtxDetection.detect_events_security_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -156,8 +123,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing Security logs")
logging.error(traceback.format_exc())
#like the previous try block, but for the System logs
try:
#EvtxDetection.multiprocess(system_path_list,EvtxDetection.detect_events_system_log,input_timezone,timestart,timeend)
sys=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (system_path_list,EvtxDetection.detect_events_system_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -169,8 +134,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing System logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the PowerShell Operational logs
try :
#EvtxDetection.multiprocess(powershellop_path_list,EvtxDetection.detect_events_powershell_operational_log,input_timezone,timestart,timeend)
pwshop=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (powershellop_path_list,EvtxDetection.detect_events_powershell_operational_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -182,8 +145,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing Powershell Operational logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the PowerShell logs
try :
#EvtxDetection.multiprocess(powershell_path_list,EvtxDetection.detect_events_powershell_log,input_timezone,timestart,timeend)
pwsh=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (powershell_path_list,EvtxDetection.detect_events_powershell_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -195,8 +156,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing Powershell logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the Terminal Services LocalSessionManager logs
try :
#EvtxDetection.multiprocess(terminal_path_list,EvtxDetection.detect_events_TerminalServices_LocalSessionManager_log,input_timezone,timestart,timeend)
terminal=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (terminal_path_list,EvtxDetection.detect_events_TerminalServices_LocalSessionManager_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -208,8 +167,6 @@ def evtxdetect_auto():
except Exception as e:
print("Error Analyzing TerminalServices LocalSessionManager logs")
logging.error(traceback.format_exc())
#like the previous try block, but for the Terminal Services RDP Client (Remote Desktop client) logs
try :
#EvtxDetection.multiprocess(terminal_path_list,EvtxDetection.detect_events_TerminalServices_LocalSessionManager_log,input_timezone,timestart,timeend)
terminal_client=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (terminal_Client_path_list,EvtxDetection.detect_events_TerminalServices_RDPClient_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -222,7 +179,6 @@ def evtxdetect_auto():
print("Error Analyzing TerminalServices RDP Client logs")
logging.error(traceback.format_exc())
#like the previous try block, but for the Scheduled Task logs
try:
#EvtxDetection.multiprocess(scheduledtask_path_list,EvtxDetection.detect_events_scheduled_task_log,input_timezone,timestart,timeend)
scheduled=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (scheduledtask_path_list,EvtxDetection.detect_events_scheduled_task_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
@ -235,26 +191,24 @@ def evtxdetect_auto():
print("Error Analyzing Scheduled Task logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the Windows Defender logs
try:
#EvtxDetection.multiprocess(defender_path_list,EvtxDetection.detect_events_windows_defender_log,input_timezone,timestart,timeend)
defen=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (defender_path_list,EvtxDetection.detect_events_windows_defender_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
defen.start()
process_list.append(defen)
except IOError :
print("Error Analyzing Windows Defender logs : ", end='')
print("File Path Does Not Exist")
except Exception as e:
print("Error Analyzing Windows Defender logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the Windows Remote Management (WinRM) logs
try:
#EvtxDetection.multiprocess(winrm_path_list,EvtxDetection.detect_events_Microsoft_Windows_WinRM,input_timezone,timestart,timeend)
winrm=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (winrm_path_list,EvtxDetection.detect_events_Microsoft_Windows_WinRM,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
winrm.start()
process_list.append(winrm)
except IOError :
print("Error Analyzing WinRM logs : ", end='')
print("File Path Does Not Exist")
@ -262,12 +216,12 @@ def evtxdetect_auto():
print("Error Analyzing WinRM logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the Sysmon (System Monitor) logs
try:
#EvtxDetection.multiprocess(sysmon_path_list,EvtxDetection.detect_events_Sysmon_log,input_timezone,timestart,timeend)
sysmon=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (sysmon_path_list,EvtxDetection.detect_events_Sysmon_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
sysmon.start()
process_list.append(sysmon)
except IOError :
print("Error Analyzing Sysmon logs ")
print("File Path Does Not Exist")
@ -275,12 +229,12 @@ def evtxdetect_auto():
print("Error Analyzing Sysmon logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the Group Policy logs
try:
#EvtxDetection.multiprocess(group_policy_path_list,EvtxDetection.detect_events_group_policy_log,input_timezone,timestart,timeend)
gp=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (group_policy_path_list,EvtxDetection.detect_events_group_policy_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
gp.start()
process_list.append(gp)
except IOError :
print("Error Analyzing Group Policy logs ")
print("File Path Does Not Exist")
@ -288,12 +242,12 @@ def evtxdetect_auto():
# print("Error Analyzing Group Policy logs ")
# logging.error(traceback.format_exc())
#like the previous try block, but for the SMB (Server Message Block) server logs
try:
#EvtxDetection.multiprocess(SMB_SERVER_path_list,EvtxDetection.detect_events_SMB_Server_log,input_timezone,timestart,timeend)
smbserv=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (SMB_SERVER_path_list,EvtxDetection.detect_events_SMB_Server_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
smbserv.start()
process_list.append(smbserv)
except IOError :
print("Error Analyzing SMB Server logs ")
print("File Path Does Not Exist")
@ -301,20 +255,19 @@ def evtxdetect_auto():
print("Error Analyzing Group Policy logs ")
logging.error(traceback.format_exc())
#like the previous try block, but for the SMB client logs
try:
#EvtxDetection.multiprocess(SMB_CLIENT_path_list,EvtxDetection.detect_events_SMB_Client_log,input_timezone,timestart,timeend)
smbcli=multiprocessing.Process(target= EvtxDetection.multiprocess, args = (SMB_CLIENT_path_list,EvtxDetection.detect_events_SMB_Client_log,input_timezone,timestart,timeend,objectaccess,processexec,logons,frequencyanalysis,allreport,Output,CPU_Core,temp_dir))
smbcli.start()
process_list.append(smbcli)
except IOError :
print("Error Analyzing SMB Client logs ")
print("File Path Does Not Exist")
except Exception as e:
print("Error Analyzing Group Policy logs ")
logging.error(traceback.format_exc())
#process.join() waits for every child process to finish
for process in process_list:
process.join()
print("preparing results")
@ -352,72 +305,43 @@ def evtxdetect_auto():
Frequency_Analysis_Sysmon=EvtxDetection.Frequency_Analysis_Sysmon
Frequency_Analysis_SMB_Server=EvtxDetection.Frequency_Analysis_SMB_Server
Frequency_Analysis_TerminalServices=EvtxDetection.Frequency_Analysis_TerminalServices
# check whether "_User_SIDs_report.csv" exists in the temporary directory
if os.path.exists(temp_dir + "_User_SIDs_report.csv"):
# if it does, read the file into a pandas DataFrame, then convert the DataFrame into a dict
# keyed by column name, each value being that column as a list
# (the commented-out line kept the data as a DataFrame; the current code converts it to a dict)
#User_SIDs = pd.DataFrame(pd.read_csv(temp_dir + "_User_SIDs_report.csv"))
User_SIDs = pd.DataFrame(pd.read_csv(temp_dir + "_User_SIDs_report.csv")).to_dict(orient='list')
else:
# otherwise report that the file does not exist
# (the f-string interpolates the value of temp_dir into the message)
print(f"{temp_dir + '_User_SIDs_report.csv'} does not exist.")
#User_SIDs = pd.DataFrame(User_SIDs)
#User_SIDs=EvtxDetection.User_SIDs
resolveSID()
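# --- illustrative sketch (not part of the original file) ---------------------
# What to_dict(orient='list') produces: each column becomes a plain list, which
# is the shape resolveSID() indexes into; the SIDs and names are made up.
import pandas as pd

_df = pd.DataFrame({"SID": ["S-1-5-21-1", "S-1-5-21-2"], "User": ["alice", "bob"]})
print(_df.to_dict(orient='list'))
# {'SID': ['S-1-5-21-1', 'S-1-5-21-2'], 'User': ['alice', 'bob']}
# -----------------------------------------------------------------------------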
'''—————————————————————————————————————— section divider ——————————————————————————————————————'''
def auto_detect(path):
global input_timezone
# compile regular expressions that capture the contents of the EventID, Channel, and Computer tags
EventID_rex = re.compile('<EventID.*>(.*)<\/EventID>', re.IGNORECASE)
Channel_rex = re.compile('<Channel.*>(.*)<\/Channel>', re.IGNORECASE)
Computer_rex = re.compile('<Computer.*>(.*)<\/Computer>', re.IGNORECASE)
# check whether the supplied path is a directory or a file
if os.path.isdir(path):
# for a directory, use libPath to find every EVTX file recursively
files=list(libPath(path).rglob("*.[eE][vV][tT][xX]"))
#files=glob.glob(path+"/**/"+"*.evtx")
elif os.path.isfile(path):
# for a file, glob just that path
files=glob.glob(path)
else:
# if the path is neither, report the problem and return
print("Issue with the path" )
return
#print("hunting ( %s ) in files ( %s )"%(str_regex,files))
#user_string = input('please enter a string to convert to regex: ')
# iterate over the files that were found
for file in files:
file=str(file)
print("Analyzing "+file)
try:
# try to parse the file with PyEvtxParser
parser = PyEvtxParser(file)
except:
# if parsing fails (for example, the file is corrupted), report it and move on to the next file
print("Issue analyzing "+file +"\nplease check if its not corrupted")
continue
try:
# iterate over the event records produced by the parser
for record in parser.records():
# use the regex to pull the channel name out of the record data
Channel = Channel_rex.findall(record['data'])
# if the Channel list is non-empty, take its first element (the channel name), strip surrounding whitespace,
# and append the file path to the list for that channel
# note: because of the break, each file is added only to the first matching list
if Channel[0].strip()=="Security":
Security_path_list.append(file)
break
@ -465,20 +389,8 @@ def auto_detect(path):
break
except:
# catch the exception and report it
print("issue assigning path")
evtxdetect_auto()
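# --- illustrative sketch (not part of the original file) ---------------------
# How the Channel regex routes a file: pull the <Channel> text out of one
# record's XML and compare it against the known channel names; the record
# string is a made-up minimal example.
import re

_Channel_rex = re.compile('<Channel.*>(.*)<\\/Channel>', re.IGNORECASE)
_record_data = "<Event><System><Channel>Security</Channel></System></Event>"
_channel = _Channel_rex.findall(_record_data)
if _channel and _channel[0].strip() == "Security":
    print("append this file to Security_path_list and stop scanning records")
# -----------------------------------------------------------------------------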
'''—————————————————————————————————————— section divider ——————————————————————————————————————'''
def threat_hunt(path,str_regex,eid,hunt_file):
global timestart,timeend,input_timezone, Output
import os
@ -487,17 +399,15 @@ def threat_hunt(path,str_regex,eid,hunt_file):
if 1==1:
if hunt_file is not None:
if os.path.isfile(hunt_file):
print(regex_file) # prints an empty list; looks like a debug leftover that could be removed
print(regex_file)
regex_file=open(hunt_file).read().split("\n")
regex_file.remove('')# read the file and strip the blank line
regex_file.remove('')
print(regex_file)
else:
print("Issue with the hunt file path" )
return
# use os.path.isdir and os.path.isfile to classify the path
if os.path.isdir(path):
files=list(libPath(path).rglob("*.[eE][vV][tT][xX]"))# libPath must return an object that supports rglob
files=list(libPath(path).rglob("*.[eE][vV][tT][xX]"))
elif os.path.isfile(path):
files=glob.glob(path)
@ -505,29 +415,16 @@ def threat_hunt(path,str_regex,eid,hunt_file):
print("Issue with the path" )
return
# decide which regex list to use
#user_string = input('please enter a string to convert to regex: ')
if str_regex is not None:
regex=[str_regex]
elif str_regex is None and len(regex_file)>0:# note: the case where str_regex is None and regex_file is empty is not handled
elif str_regex is None and len(regex_file)>0:
regex=regex_file
print("hunting ( %s ) in files ( %s )"%(regex,files))
EvtxHunt.Evtx_hunt(files,regex,eid,input_timezone,Output,timestart,timeend)
#except Exception as e:
# print("Error in hunting module ")
'''—————————————————————————————————————— section divider ——————————————————————————————————————'''
def report():
# define the output file names
global Output,User_SIDs
timesketch=Output+"_TimeSketch.csv"
Report=Output+"_Report.xlsx"
@ -536,32 +433,26 @@ def report():
ProcessEvents=Output+"_Process_Execution_Events.csv"
Collected_SIDs=Output+"_Collected_SIDs.csv"
print("preparing report")
# read the user SID report
if os.path.exists(temp_dir + "_User_SIDs_report.csv"):
User_SIDs = pd.DataFrame(pd.read_csv(temp_dir + "_User_SIDs_report.csv"))
else:
print(f"{temp_dir + '_User_SIDs_report.csv'} does not exist.")
User_SIDs = pd.DataFrame(User_SIDs)
# read the Sysmon report
if os.path.exists(temp_dir + "_Sysmon_report.csv"):
Sysmon = pd.DataFrame(pd.read_csv(temp_dir + "_Sysmon_report.csv"))
else:
print(f"{temp_dir + '_Sysmon_report.csv'} does not exist.")
Sysmon = pd.DataFrame(Sysmon_events[0])
# read the System report
if os.path.exists(temp_dir + "_System_report.csv"):
System = pd.DataFrame(pd.read_csv(temp_dir + "_System_report.csv"))
else:
print(f"{temp_dir + '_System_report.csv'} does not exist.")
System = pd.DataFrame(System_events[0])
# read the Powershell report
if os.path.exists(temp_dir + "_Powershell_report.csv"):
Powershell = pd.DataFrame(pd.read_csv(temp_dir + "_Powershell_report.csv"))
else:
print(f"{temp_dir + '_Powershell_report.csv'} does not exist.")
Powershell = pd.DataFrame(Powershell_events[0])
# the consecutive if-else blocks below all do the same thing: load one named report each
if os.path.exists(temp_dir + "_Powershell_Operational_report.csv"):
Powershell_Operational = pd.DataFrame(pd.read_csv(temp_dir + "_Powershell_Operational_report.csv"))
else:
@ -697,47 +588,33 @@ def report():
#Object_Access_Events_pd=pd.DataFrame(Object_Access_Events[0])
#ExecutedProcess_Events_pd=pd.DataFrame(Executed_Process_Events[0])
# allresults=pd.DataFrame([TerminalServices,Powershell_Operational],columns=['Date and Time', 'Detection Rule','Detection Domain','Severity','Event Description','Event ID','Original Event Log'])
# concatenate the DataFrames (ScheduledTask, Powershell_Operational, Sysmon, ...) into one, using an inner join and ignoring the original indexes
allresults = pd.concat(
[ScheduledTask, Powershell_Operational, Sysmon, System, Powershell, Security,TerminalClient, TerminalServices, WinRM,
Windows_Defender,GroupPolicy,SMBServer,SMBClient], join="inner", ignore_index=True)
# rename two columns: 'Date and Time' becomes 'datetime' and 'Detection Rule' becomes 'message'
allresults = allresults.rename(columns={'Date and Time': 'datetime', 'Detection Rule': 'message'})
# add a 'timestamp_desc' column initialized to the empty string
allresults['timestamp_desc'] = ""
# reorder the DataFrame columns into the layout below
allresults = allresults[
['message','timestamp', 'datetime', 'timestamp_desc', 'Detection Domain', 'Severity', 'Event Description', 'Event ID',
'Original Event Log','Computer Name','Channel']]
# count the occurrences of each Severity value into a new DataFrame with columns 'Severity' and 'Counts'
Result_Summary_Severity=allresults["Severity"].value_counts().reset_index()
Result_Summary_Severity.columns = ['Severity', 'Counts']
# count the occurrences of each message value into a new DataFrame with columns 'Detection' and 'Counts'
Result_Summary_Detections=allresults["message"].value_counts().reset_index()
Result_Summary_Detections.columns = ['Detection', 'Counts']
# save allresults as CSV under the name held in timesketch, without the index
allresults.to_csv(timesketch, index=False)
# save User_SIDs as CSV under the name held in Collected_SIDs, without the index
User_SIDs.to_csv(Collected_SIDs, index=False)
# announce that the TimeSketch report was saved under the name held in timesketch
print("Time Sketch Report saved as "+timesketch)
#Logon_Events_pd.to_csv(LogonEvents, index=False)
# if logons or allreport is True, announce that the logon events report was saved under the name in LogonEvents
if (logons==True or allreport==True):
print("Logon Events Report saved as "+LogonEvents)
#Object_Access_Events_pd.to_csv(ObjectAccess, index=False)
# if objectaccess or allreport is True, announce that the object access events report was saved under the name in ObjectAccess
if (objectaccess==True or allreport==True):
print("Object Access Events Report saved as "+ObjectAccess)
#ExecutedProcess_Events_pd.to_csv(ProcessEvents, index=False)
# if processexec or allreport is True, announce that the process execution events report was saved under the name in ProcessEvents
if (processexec==True or allreport==True):
print("Process Execution Events Report saved as "+ProcessEvents)
# Sysmon=Sysmon.reset_index()
# Sysmon=Sysmon.drop(['index'],axis=1)
#write the Excel workbook
writer = pd.ExcelWriter(Report, engine='xlsxwriter', engine_kwargs={'options':{'encoding': 'utf-8'}})
Result_Summary_Severity.to_excel(writer, sheet_name='Result Summary', index=False)
Result_Summary_Detections.to_excel(writer, sheet_name='Result Summary' , startrow=len(Result_Summary_Severity)+3, index=False)
@ -800,65 +677,37 @@ def report():
print("Detection Summary :\n############################################\nNumber of incidents by Severity:\n"+allresults["Severity"].value_counts().to_string()+"\n############################################\nNumber of incidents by Detection Rule:\n"+allresults["message"].value_counts().to_string()+"\n\n")
'''—————————————————————————————————————— section divider ——————————————————————————————————————'''
def convert_list():
# declare the long list of global containers used below
global timestart,timeend,User_SIDs,SMB_Server_events,SMB_Client_events,TerminalServices_RDPClient_events,Executed_Process_Events,Group_Policy_events,Object_Access_Events,input_timezone,Logon_Events,Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,Timesketch_events,TerminalServices_Summary,Security_Authentication_Summary,Executed_Powershell_Summary
# build a list named Results holding those global containers
Results=[Executed_Powershell_Summary,SMB_Server_events,User_SIDs,SMB_Client_events,TerminalServices_RDPClient_events,Executed_Process_Events,Group_Policy_events,Object_Access_Events,Logon_Events,Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,TerminalServices_Summary,Security_Authentication_Summary
]
# iterate over each container in Results
for result in Results:
# convert every value in result[0] into a plain list
for i in result[0]:
result[0][i]=list(result[0][i])
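# --- illustrative sketch (not part of the original file) ---------------------
# What the loop above does to each container: every column value (tuple,
# Series, ...) is coerced to a plain list; the sample data is made up.
_result = [{"Process Name": ("cmd.exe",), "Number of Execution": (3,)}]
for _key in _result[0]:
    _result[0][_key] = list(_result[0][_key])
print(_result[0])  # {'Process Name': ['cmd.exe'], 'Number of Execution': [3]}
# -----------------------------------------------------------------------------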
def resolveSID():
global TerminalServices_RDPClient_events,WinRM_events,User_SIDs,RDPClient_Resolved_User,WinRM_Resolved_User
# check whether _WinRM_events_report.csv exists in the temporary directory temp_dir
if os.path.exists(temp_dir + "_WinRM_events_report.csv"):
# if it does, read the CSV into a DataFrame, convert it to a dict of lists, and assign it to WinRM_events[0]
WinRM_events[0] = pd.DataFrame(pd.read_csv(temp_dir + "_WinRM_events_report.csv")).to_dict(orient='list')
# check whether _TerminalServices_RDPClient_report.csv exists in temp_dir
if os.path.exists(temp_dir + "_TerminalServices_RDPClient_report.csv"):
# if it does, read it the same way into TerminalServices_RDPClient_events[0]
TerminalServices_RDPClient_events[0] = pd.DataFrame(pd.read_csv(temp_dir + "_TerminalServices_RDPClient_report.csv")).to_dict(orient='list')
# RDPClient_Resolved_User collects the resolved user names
RDPClient_Resolved_User=[]
# WinRM_Resolved_User collects the resolved user names
WinRM_Resolved_User=[]
# walk the SIDs stored under "UserID" in TerminalServices_RDPClient_events[0]
for SID in TerminalServices_RDPClient_events[0]["UserID"]:
# check whether this SID appears under "SID" in the User_SIDs dict
if SID in User_SIDs["SID"]:
# if it does, look up the matching user name and append it to RDPClient_Resolved_User
RDPClient_Resolved_User.append(User_SIDs["User"][User_SIDs["SID"].index(SID)])
else:
# otherwise append "Could not be resolved"
RDPClient_Resolved_User.append("Could not be resolved")
# walk the SIDs stored under "UserID" in WinRM_events[0]
for SID in WinRM_events[0]["UserID"]:
# check whether this SID appears under "SID" in the User_SIDs dict
if SID in User_SIDs["SID"]:
# if it does, look up the matching user name and append it to WinRM_Resolved_User
WinRM_Resolved_User.append(User_SIDs["User"][User_SIDs["SID"].index(SID)])
else:
# otherwise append "Could not be resolved"
WinRM_Resolved_User.append("Could not be resolved")
#print("user sid"+str(User_SIDs["SID"]))
#print("RDPCLient : "+str(RDPClient_Resolved_User))
#print("WinRM : " + str(WinRM_Resolved_User))
#create the temporary directory
def create_temp_dir():
global temp_dir
@ -870,7 +719,6 @@ def create_temp_dir():
else:
print(f"{temp_dir} already exists")
#create the output directory
def create_out_dir(output):
global temp_dir
@ -885,7 +733,6 @@ def create_out_dir(output):
return output+"/"+output
#remove the temporary directory
def clean_temp_dir():
global temp_dir
if os.path.exists(temp_dir):
@ -896,13 +743,10 @@ def clean_temp_dir():
os.rmdir(os.path.join(root, name))
os.rmdir(temp_dir)
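# --- illustrative sketch (not part of the original file) ---------------------
# The bottom-up cleanup above, self-contained: delete files first, then the
# emptied directories; shutil.rmtree(temp_dir) would be the one-call equivalent.
import os, tempfile

_tmp = tempfile.mkdtemp()
open(os.path.join(_tmp, "report.csv"), "w").close()
for _root, _dirs, _files in os.walk(_tmp, topdown=False):
    for _name in _files:
        os.remove(os.path.join(_root, _name))
    for _name in _dirs:
        os.rmdir(os.path.join(_root, _name))
os.rmdir(_tmp)
# -----------------------------------------------------------------------------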
def main():
# record the program start time
tic = time.time()
print(Banner)
global CPU_Core,timestart,timeend,Output,objectaccess,Path,processexec,logons,frequencyanalysis,Security_path,system_path,scheduledtask_path,defender_path,powershell_path,powershellop_path,terminal_path,winrm_path,sysmon_path,input_timezone,objectaccess,processexec,logons,frequencyanalysis,allreport
# build the command-line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("-p","--path", help="path to folder containing windows event logs , APT-Hunter will detect each log type automatically")
parser.add_argument("-o", "--out",help="output file name")
@ -923,12 +767,10 @@ def main():
parser.add_argument("-rules","--rules", help="path to sigma rules in json format")
#parser.add_argument("-evtfreq","--evtfreq", help="Produce event ID frequency analysis report",action='store_true')
parser.add_argument("-cores","--cores", help="cpu cores to be used in multiprocessing , default is half the number of availble CPU cores")
# parse the command-line arguments
args = parser.parse_args()
# if an output name was given, create the output directory
if args.out is not None:
Output=create_out_dir(args.out)
# if no log path was given, print an error and exit
if (args.path is None ):# and args.security is None and args.system is None and args.scheduledtask is None and args.defender is None and args.powershell is None and args.powershellop is None and args.terminal is None and args.winrm is None and args.sysmon is None):
print("You didn't specify a path for the logs \nuse --help to print help message")
exit()
@ -945,7 +787,6 @@ def main():
allreport=args.allreport
CPU_Core=0
#print(f"all reports value : {allreport}\nlogons value {logons}")
# try to parse the start and end times; on a bad format, print an error and exit
try:
if args.start is not None and args.end is not None:
timestart=datetime.timestamp(dateutil.parser.isoparse(args.start))
@ -953,20 +794,18 @@ def main():
except:
print("Error parsing time , please use ISO format with timestart and timeend Ex: (2022-04-03T20:56+04:00 or 2022-04-03T20:56 or 2022-04-03 20:56 or 2022-04-03)")
exit()
# set the timezone from the command-line arguments
if args.timezone is not None:
if args.timezone.lower()=="local":
input_timezone=tz.tzlocal()
else:
input_timezone=timezone(args.timezone)
# set the CPU core count from the command-line arguments; on a bad value, print an error and exit
if args.cores is not None:
try:
CPU_Core=int(args.cores)
except:
print(f"Error using supplied CPU cores {args.cores}")
exit(0)
# if the Sigma module is enabled, run the Sigma analysis
if args.sigma is not False:
if args.rules is not None:
SigmaHunter.Sigma_Analyze(Path,args.rules,Output)
@ -975,7 +814,6 @@ def main():
toc = time.time()
print('Done in {:.4f} seconds'.format(toc-tic))
return
# if a hunt string or regex was given, run the threat hunt
if args.hunt is not None:
if args.eid is not None:
threat_hunt(Path,args.hunt,args.eid,None)
@ -984,7 +822,6 @@ def main():
toc = time.time()
print('Done in {:.4f} seconds'.format(toc-tic))
return
# if Office 365 log hunting is enabled, run the Office 365 analysis
if args.o365hunt is not False:
if args.o365rules is not None:
O365Hunter.analyzeoff365(Path, args.o365rules,Output,input_timezone,args.o365raw)
@ -993,7 +830,6 @@ def main():
#toc = time.time()
#print('Done in {:.4f} seconds'.format(toc-tic))
return
# if a hunt file was given, hunt for the strings or regexes it contains
if args.hunt is None and args.huntfile is not None:
if args.eid is not None:
threat_hunt(Path,None,args.eid,args.huntfile)
@ -1003,7 +839,7 @@ def main():
print('Done in {:.4f} seconds'.format(toc-tic))
return
# with no specific hunt or analysis type requested, auto-detect the log types and build the report
#if args.type is None or args.type=="evtx":
try:
create_temp_dir()
@ -1016,7 +852,6 @@ def main():
clean_temp_dir()
toc = time.time()
# print the total run time
print('Analysis finished in {:.4f} seconds'.format(toc-tic))
return

@ -58,7 +58,6 @@ Timesketch_events=[{'message':[],'timestamp':[],'datetime':[],'timestamp_desc':[
def evtxdetect():
#extract the explicitly configured evtx log files
global input_timezone,Logon_Events,Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,Timesketch_events,TerminalServices_Summary,Security_Authentication_Summary
try:
print(Security_path)
@ -153,7 +152,6 @@ def evtxdetect():
Logon_Events =EvtxDetection.Logon_Events
def csvdetect(winevent):
#extract the explicitly configured csv log files
global Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,Timesketch_events,TerminalServices_Summary,Security_Authentication_Summary
try:
#print(Security_path,winevent)
@ -247,7 +245,6 @@ def csvdetect(winevent):
Security_Authentication_Summary =CSVDetection.Security_Authentication_Summary
def evtxdetect_auto():
#auto-extract the evtx log files
global input_timezone,Logon_Events,Executed_Process_Summary,TerminalServices_Summary,Security_Authentication_Summary,Sysmon_events,WinRM_events,Security_events,System_events,ScheduledTask_events,Powershell_events,Powershell_Operational_events,TerminalServices_events,Windows_Defender_events,Timesketch_events,TerminalServices_Summary,Security_Authentication_Summary
try:
#print(Security_path)
@ -343,7 +340,6 @@ def evtxdetect_auto():
def auto_detect(path):
#auto-detect every log file
global input_timezone
EventID_rex = re.compile('<EventID.*>(.*)<\/EventID>', re.IGNORECASE)
Channel_rex = re.compile('<Channel.*>(.*)<\/Channel>', re.IGNORECASE)
@ -402,7 +398,6 @@ def auto_detect(path):
print("issue assigning path")
evtxdetect_auto()
def threat_hunt(path,str_regex):
#threat hunting
global input_timezone, Output
import os
@ -419,7 +414,6 @@ def threat_hunt(path,str_regex):
EvtxHunt.Evtx_hunt(files,str_regex,input_timezone,Output)
def report():
#report generation
global Output
timesketch=Output+"_TimeSketch.csv"
Report=Output+"_Report.xlsx"
@ -473,7 +467,6 @@ def report():
def main():
print(Banner)
global Output,Path,Security_path,system_path,scheduledtask_path,defender_path,powershell_path,powershellop_path,terminal_path,winrm_path,sysmon_path,input_timezone
# build the command-line argument parser
parser = argparse.ArgumentParser()
parser.add_argument("-p","--path", help="path to folder containing windows event logs generated by the powershell log collector")
parser.add_argument("-o", "--out",
@ -559,4 +552,6 @@ def main():
if args.type=="csv":
csvdetect(True)
report()
main()

@ -1,24 +1,15 @@
#!/bin/bash
# make sure the script received exactly one argument
if [ "$#" -ne 1 ]; then
echo "Please enter rules path as argument "
exit 1
fi
# announce that the Sigma converter tool is being fetched
echo "Getting Sigma Converter Tool"
# clone the SigmaHQ legacy-sigmatools repository into the current directory
git clone https://github.com/SigmaHQ/legacy-sigmatools.git
# announce that the sigma rules are being converted
echo "Converting sigma rules "
# run the Sigma converter to turn the sigma rule files into JSON
# --recurse: process every rule file under the given directory recursively
# --target sqlite: convert for the sqlite backend
# --backend-option table=Events: name the output table Events
# -d $1: take the sigma rule directory from the script's first argument
# -c lib/config/sigma-converter-rules-config.yml: path to the config file
# -o rules.json: write the output to rules.json
# --output-fields: which fields to emit
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d $1 -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status
# announce completion, including the generated file name
echo "Rules created with file name : rules.json "

@ -1,23 +1,11 @@
#!/bin/bash
# announce that the Sigma converter tool is being fetched
echo "Getting Sigma Converter Tool"
# clone the SigmaHQ legacy-sigmatools repository into the current directory
git clone https://github.com/SigmaHQ/legacy-sigmatools.git
# announce that the sigma rules are being fetched
echo "Getting Sigma Rules"
# clone the SigmaHQ sigma repository (the rules themselves) into the current directory
git clone https://github.com/SigmaHQ/sigma.git
# announce that the sigma rules are being converted
echo "Converting sigma rules "
# run the Sigma converter to turn the sigma rule files into JSON
# --recurse: process every rule file under the given directory recursively
# --target sqlite: convert for the sqlite backend
# --backend-option table=Events: name the output table Events
# -d sigma/rules/windows/: use the windows rules directory of the sigma repository
# -c lib/config/sigma-converter-rules-config.yml: path to the config file
# -o rules.json: write the output to rules.json
# --output-fields: which fields to emit
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d sigma/rules/windows/ -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status
# announce completion, including the generated file name
echo "Rules created with file name : rules.json "

@ -2,115 +2,98 @@
{
"name": "Suspicious User Agent",
"severity": "High",
"query": "SELECT * FROM events WHERE UserAgent LIKE '%python%' OR UserAgent LIKE '%ruler%' OR UserAgent LIKE '%curl%' OR UserAgent LIKE '%Wget%' OR UserAgent LIKE '%python-requests%' OR UserAgent LIKE '%AADInternals%' OR UserAgent LIKE '%azurehound%' OR UserAgent LIKE '%axios%' OR UserAgent LIKE '%BAV2ROPC%'",
"query": "SELECT * FROM events WHERE UserAgent LIKE '%python%' OR UserAgent LIKE '%ruler%' OR UserAgent LIKE '%curl%' OR UserAgent LIKE '%Wget%' OR UserAgent LIKE '%python-requests%' OR UserAgent LIKE '%AADInternals%' OR UserAgent LIKE '%azurehound%' OR UserAgent LIKE '%axios%' OR UserAgent LIKE '%BAV2ROPC%' "
},
{
"name": "User adding or removing Inbox Rule",
"severity": "Medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%InboxRule%' OR Operation LIKE 'Set-Mailbox' OR Operation LIKE '%DeliverToMailboxAndForward%' OR Operation LIKE '%ForwardingAddress%' OR Operation LIKE '%ForwardingAddress%'",
"query": "SELECT * FROM events WHERE Operation LIKE '%InboxRule%' OR Operation LIKE 'Set-Mailbox' OR Operation LIKE '%DeliverToMailboxAndForward%' OR Operation LIKE '%ForwardingAddress%' OR Operation LIKE '%ForwardingAddress%' "
},
{
"name": "After Hours Activity",
"severity": "Medium",
"query": "SELECT * FROM events WHERE (CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END >= 20 OR CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END < 6) AND NOT (Operation LIKE 'File%' OR Operation LIKE 'List%' OR Operation LIKE 'Page%' OR Operation LIKE '%UserLogin%');",
"query": "SELECT * FROM events WHERE (CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END >= 20 OR CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END < 6) AND NOT (Operation LIKE 'File%' OR Operation LIKE 'List%' OR Operation LIKE 'Page%' OR Operation LIKE '%UserLogin%');"
},
{
"name": "Possible file exfiltration",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%FileUploaded%'",
"query": "SELECT * FROM events WHERE Operation LIKE '%FileUploaded%' "
},
{
"name": "Admin searching in emails of other users",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%SearchStarted%' OR Operation LIKE '%SearchExportDownloaded%' OR Operation LIKE '%ViewedSearchExported%'",
"query": "SELECT * FROM events WHERE Operation LIKE '%SearchStarted%' OR Operation LIKE '%SearchExportDownloaded%' OR Operation LIKE '%ViewedSearchExported%' "
},
{
"name": "Strong Authentication Disabled",
"severity": "medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%disable strong authentication%'",
"query": "SELECT * FROM events WHERE Operation LIKE '%disable strong authentication%'"
},
{
"name": "User added to admin group",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add member to group%' AND ModifiedProperties Like '%admin%') OR ( Operation LIKE '%AddedToGroup%' AND TargetUserOrGroupName Like '%admin%')",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add member to group%' AND ModifiedProperties Like '%admin%') OR ( Operation LIKE '%AddedToGroup%' AND TargetUserOrGroupName Like '%admin%') "
},
{
"name": "New Policy created",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add policy%' )",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add policy%' ) "
},
{
"name": "Security Alert triggered",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AlertTriggered%' AND NOT Severity Like '%Low%')",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AlertTriggered%' AND NOT Severity Like '%Low%') "
},
{
"name": "Transport rules ( mail flow rules ) modified",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%TransportRule%' )",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%TransportRule%') "
},
{
"name": "An application was registered in Azure AD",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add service principal.%')",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add service principal.%') "
},
{
"name": "Add app role assignment grant to user",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add app role assignment grant to user.%')",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add app role assignment grant to user.%') "
},
{
"name": "eDiscovery Abuse",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%New-ComplianceSearch%')",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%New-ComplianceSearch%') "
},
{
"name": "Operations affecting OAuth Applications",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Add application.' OR Operation = 'Update application' OR Operation = 'Add service principal.' OR Operation = 'Update application Certificates and secrets management' OR Operation = 'Update applicationUpdate service principal.' OR Operation = 'Add app role assignment grant to user.' OR Operation = 'Add delegated permission grant.' OR Operation = 'Add owner to application.' OR Operation = 'Add owner to service principal.')",
"query": "SELECT * FROM events WHERE ( Operation = 'Add application.' OR Operation = 'Update application' OR Operation = 'Add service principal.' OR Operation = 'Update application Certificates and secrets management' OR Operation = 'Update applicationUpdate service principal.' OR Operation = 'Add app role assignment grant to user.' OR Operation = 'Add delegated permission grant.' OR Operation = 'Add owner to application.' OR Operation = 'Add owner to service principal.') "
},
{
"name": "Suspicious Operations affecting Mailbox",
"name": "Suspicious Operations affecting Mailbox ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Set-MailboxJunkEmailConfiguration' OR Operation = 'SoftDelete' OR Operation = 'SendAs' OR Operation = 'HardDelete' OR Operation = 'MoveToDeletedItems' )",
"query": "SELECT * FROM events WHERE ( Operation = 'Set-MailboxJunkEmailConfiguration' OR Operation = 'SoftDelete' OR Operation = 'SendAs' OR Operation = 'HardDelete' OR Operation = 'MoveToDeletedItems' ) "
},
{
"name": "Suspicious Operations affecting SharePoint",
"name": "Suspicious Operations affecting SharePoint ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'AddedToSecureLink' OR Operation = 'SearchQueryPerformed' OR Operation = 'SecureLinkCreated' OR Operation = 'SecureLinkUpdated' OR Operation = 'SharingInvitationCreated' )",
"query": "SELECT * FROM events WHERE ( Operation = 'AddedToSecureLink' OR Operation = 'SearchQueryPerformed' OR Operation = 'SecureLinkCreated' OR Operation = 'SecureLinkUpdated' OR Operation = 'SharingInvitationCreated' ) "
},
{
"name": "User Modifying RetentionPolicy",
"name": "User Modifying RetentionPolicy ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%UnifiedAuditLogRetentionPolicy%' )",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%UnifiedAuditLogRetentionPolicy%' ) "
},
{
"name": "User Modifying Audit Logging",
"name": "User Modifying Audit Logging ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AdminAuditLogConfig%' )",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AdminAuditLogConfig%' ) "
},
{
"name": "String Authentication Disabled",
"name": "String Authentication Disabled ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Disable Strong Authentication.%' )",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Disable Strong Authentication.%' ) "
}
]

@ -234,10 +234,8 @@ def detect_events_security_log(file_name='deep-blue-secuity.csv',winevent=False)
Process_Command_Line = Process_Command_Line_rex.findall(row['Details'])
#User Cretion using Net command
# user creation via the Net command
if row['Event ID']=="4688":
try:
# check whether the event details contain a user-add command
if len(re.findall('.*user.*/add.*',row['Details']))>0:
#print("test")
@ -246,9 +244,7 @@ def detect_events_security_log(file_name='deep-blue-secuity.csv',winevent=False)
#print("User Name : ( %s ) "%Account_Name[0].strip(),end='')
#print("with Command Line : ( " + Process_Command_Line[0].strip()+" )")
# build the event description
Event_desc ="User Name : ( %s ) "%Account_Name[0].strip()+"with Command Line : ( " + Process_Command_Line[0].strip()+" )"
# append the event details to the Security_events structure
Security_events[0]['Date and Time'].append(datetime.strptime(row['Date and Time'],'%m/%d/%Y %I:%M:%S %p').isoformat())
Security_events[0]['timestamp'].append(datetime.timestamp(datetime.strptime(row['Date and Time'],'%m/%d/%Y %I:%M:%S %p')))
Security_events[0]['Detection Rule'].append("User Added using Net Command")
@ -259,7 +255,6 @@ def detect_events_security_log(file_name='deep-blue-secuity.csv',winevent=False)
Security_events[0]['Original Event Log'].append(str(row['Details']).replace("\r", " "))
#Detecting privielge Escalation using Token Elevation
# detect privilege escalation attempts that use named pipes
if len(re.findall(r"cmd.exe /c echo [a-z]{6} > \\\.\\pipe\\\w{1,10}",process_command_line))>0:
Event_desc ="User Name : ( %s ) " % user+"conducting NAMED PIPE privilege escalation with Command Line : ( " + process_command_line + " ) "
@ -272,7 +267,6 @@ def detect_events_security_log(file_name='deep-blue-secuity.csv',winevent=False)
Security_events[0]['Event ID'].append(row['Event ID'])
Security_events[0]['Original Event Log'].append(str(row['Details']).replace("\r", " "))
# check whether the process command line points at a suspicious location (temp, tmp, Program Data)
if Process_Command_Line[0].strip().lower().find("\\temp\\")>-1 or Process_Command_Line[0].strip().lower().find("\\tmp\\")>-1 or Process_Command_Line[0].strip().lower().find("\\program data\\")>-1:
# print("test")
@ -291,7 +285,6 @@ def detect_events_security_log(file_name='deep-blue-secuity.csv',winevent=False)
Security_events[0]['Event ID'].append(row['Event ID'])
Security_events[0]['Original Event Log'].append(str(row['Details']).replace("\r", " "))
# check for suspicious executables
for i in Suspicious_executables:
if Process_Command_Line[0].strip().lower().find(i.lower())>-1:
@ -311,7 +304,6 @@ def detect_events_security_log(file_name='deep-blue-secuity.csv',winevent=False)
Security_events[0]['Event ID'].append(row['Event ID'])
Security_events[0]['Original Event Log'].append(str(row['Details']).replace("\r", " "))
# check for suspicious PowerShell commands
for i in Suspicious_powershell_commands:
if Process_Command_Line[0].strip().lower().find(i.lower())>-1:
@ -333,7 +325,6 @@ def detect_events_security_log(file_name='deep-blue-secuity.csv',winevent=False)
except:
# catch event-parsing errors and print the offending event
print("Error parsing below Event \n"+row['Details'])
continue
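# --- illustrative sketch (not part of the original file) ---------------------
# The 4688 account-creation check above, isolated: a made-up Details string run
# through the same regex the handler uses.
import re

_details = "Process Command Line: net user eviladmin P@ssw0rd /add"
if re.findall('.*user.*/add.*', _details):
    print("User Added using Net Command")
# -----------------------------------------------------------------------------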

@ -746,20 +746,18 @@ def detect_events_security_log(file_name, shared_data):
ObjectProcessName=ObjectProcessName_rex.findall(record['data'])
#Detect any log that contain suspicious process name or argument
# detect any log that contains a suspicious process name or argument
if EventID[0]=="4688" or EventID[0]=="4648" or EventID[0]=="4673":# check for Event ID 4688, 4648, or 4673
for i in all_suspicious:# walk the list of suspicious items
if EventID[0]=="4688" or EventID[0]=="4648" or EventID[0]=="4673":
for i in all_suspicious:
if record['data'].lower().find(i.lower())>-1:# check whether the log data contains the suspicious item
if record['data'].lower().find(i.lower())>-1:
#print("##### " + record["timestamp"] + " #### ", end='')
#print("## Found Suspicios Process ", end='')
#print("User Name : ( %s ) " % Account_Name[0][0].strip(), end='')
#print("with Command Line : ( " + Process_Command_Line[0][0].strip() + " )")
# print("###########")
# build the event description
Event_desc ="Found a log contain suspicious command or process ( %s)"%i
# append the event details to the Security_events structure
Security_events[0]['timestamp'].append(datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat())))
Security_events[0]['Computer Name'].append(Computer[0])
Security_events[0]['Channel'].append(Channel[0])
@ -770,14 +768,12 @@ def detect_events_security_log(file_name, shared_data):
Security_events[0]['Event Description'].append(Event_desc)
Security_events[0]['Event ID'].append(EventID[0])
Security_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " "))
break # stop once a suspicious item is found
break
#User Creation using Net command
# user creation via the Net command
if EventID[0]=="4688" or EventID[0]=="4648" or EventID[0]=="4673":
try:
process_name=''
process_command_line=" "
# extract the user name
if len(Account_Name[0][0])>0:
user=Account_Name[0][0].strip()
@ -785,7 +781,7 @@ def detect_events_security_log(file_name, shared_data):
if len(Account_Name[0][1])>0:
user=Account_Name[0][1].strip()
process_command_line=Process_Command_Line[0][1].strip()
# extract the process command line
if len(Process_Command_Line)>0:
process_command_line=Process_Command_Line[0][0].strip()
"""
@ -800,7 +796,7 @@ def detect_events_security_log(file_name, shared_data):
for i in Process_Name[0]:
if len(i)>0:
process_name=i
# check whether the log data contains a user-add command
if len(re.findall('.*user.*/add.*',record['data']))>0:
#print("test")
@ -822,11 +818,9 @@ def detect_events_security_log(file_name, shared_data):
Security_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " "))
#process runing in suspicious location
# check whether the process is running from a suspicious location
found=0
if process_name.strip() not in Suspicious_process_found:# skip processes that were already flagged
if process_name.strip() not in Suspicious_process_found:
for i in Suspicious_Path:
# check whether the log data contains a suspicious path
if str(record['data']).lower().find(i.lower())>-1:#process_name.strip().lower().find(i.lower())>-1 or process_command_line.lower().find(i.lower())>-1 :
Suspicious_process_found.append(process_name.strip())
found=1
@ -850,9 +844,8 @@ def detect_events_security_log(file_name, shared_data):
Security_events[0]['Event Description'].append(Event_desc)
Security_events[0]['Event ID'].append(EventID[0])
Security_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " "))
break# stop once a suspicious path is found
break
if found!=1:
# otherwise check whether the process runs from a usual path
#process runing in suspicious location
found=0
for i in Usual_Path:
@ -882,8 +875,6 @@ def detect_events_security_log(file_name, shared_data):
Security_events[0]['Event ID'].append(EventID[0])
Security_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " "))
found=0
# detect suspicious executables
if len(Process_Command_Line)>0:
#detect suspicious executables
@ -909,7 +900,6 @@ def detect_events_security_log(file_name, shared_data):
Security_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " "))
# detect suspicious powershell commands
# detect suspicious PowerShell commands
for i in Suspicious_powershell_commands:
if process_command_line.lower().find(i.lower())>-1:
@ -934,7 +924,6 @@ def detect_events_security_log(file_name, shared_data):
#Detecting privielge Escalation using Token Elevation
# detect privilege escalation attempts that use named pipes
if len(re.findall(r"cmd.exe /c echo [a-z]{6} > \\\.\\pipe\\\w{1,10}",process_command_line.lower().strip()))>0 or len(re.findall(r"cmd.exe /c echo \w{1,10} .* \\\\\.\\pipe\\\w{1,10}",process_command_line.lower().strip()))>0:
#print("detected",process_command_line.lower().strip())
Event_desc ="User Name : ( %s ) " % user+"conducting Named PIPE privilege escalation with Command Line : ( " + process_command_line + " ) "
@ -954,7 +943,6 @@ def detect_events_security_log(file_name, shared_data):
#print(process_command_line)
#Summary of process Execution
# summary of process execution
if EventID[0]=="4688" or EventID[0]=="4648" or EventID[0]=="4673":
try:
#process_name=" "
@ -966,7 +954,6 @@ def detect_events_security_log(file_name, shared_data):
#print(process_name)
#print(Executed_Process_Summary[0]['Process Name'])
#print(process_name not in Executed_Process_Summary[0]['Process Name'])
# update the executed-process summary
if process_name not in Executed_Process_Summary[0]['Process Name']:
Executed_Process_Summary[0]['Process Name'].append(process_name.strip())
Executed_Process_Summary[0]['Number of Execution'].append(1)
@ -976,7 +963,6 @@ def detect_events_security_log(file_name, shared_data):
pass
#report of process Execution
# report of process execution
if (processexec==True or allreport==True) and EventID[0]=="4688":
#try:
@ -996,7 +982,6 @@ def detect_events_security_log(file_name, shared_data):
parent_process_name=i
else:
parent_process_name="None"
# append the process-execution event to the Executed_Process_Events structure
Executed_Process_Events[0]['timestamp'].append(datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat())))
Executed_Process_Events[0]['DateTime'].append(parse(record["timestamp"]).astimezone(input_timzone).isoformat())
Executed_Process_Events[0]['ProcessName'].append(process_name)
@ -1010,7 +995,6 @@ def detect_events_security_log(file_name, shared_data):
# non-interactive powershell being executed by another application in the background
# detect non-interactive PowerShell launched in the background by another application
if EventID[0]=="4688" :
try:
#process_name=" "
@ -1022,7 +1006,6 @@ def detect_events_security_log(file_name, shared_data):
if len(i)>0:
parent_process_name=i
# check whether this is a non-interactive PowerShell process
if process_name[0].lower().find("powershell.exe")>-1 and parent_process_name[0].lower().find("explorer.exe")==-1:
try:
Event_desc ="User Name : ( %s ) "%user+" executed non-interactive ( " + New_Process_Name[0] + " ) through : ( " + Parent_Process_Name[0] + " ) ."

@ -1,75 +1,72 @@
import csv
import re
from netaddr import * # import everything from netaddr for working with network addresses
import xml.etree.ElementTree as ET # XML parser
import pandas as pd # data-analysis library
from datetime import datetime, timezone # date and time handling
from evtx import PyEvtxParser # parser for Windows event log files
from dateutil.parser import parse, isoparse # parsers for date-time strings
from pytz import timezone # timezone handling
minlength = 1000 # presumably a string-length threshold, but unused in this file
# a list holding one dict that accumulates the hunted events
Hunting_events = [{'Date and Time': [], 'timestamp': [], 'Channel': [], 'Computer': [], 'Event ID': [], 'Original Event Log': []}]
# regular expressions that pull specific fields out of the event log records
from netaddr import *
import xml.etree.ElementTree as ET
import pandas as pd
from datetime import datetime , timezone
from evtx import PyEvtxParser
from dateutil.parser import parse
from dateutil.parser import isoparse
from pytz import timezone
minlength=1000
Hunting_events=[{'Date and Time':[],'timestamp':[],'Channel':[],'Computer':[],'Event ID':[],'Original Event Log':[]}]
EventID_rex = re.compile('<EventID.*>(.*)<\/EventID>', re.IGNORECASE)
Channel_rex = re.compile('<Channel.*>(.*)<\/Channel>', re.IGNORECASE)
Computer_rex = re.compile('<Computer.*>(.*)<\/Computer>', re.IGNORECASE)
def Evtx_hunt(files, str_regexes, eid, input_timzone, output, timestart, timeend):
"""
解析并搜索Windows事件日志文件中的特定事件
def Evtx_hunt(files,str_regexes,eid,input_timzone,output,timestart,timeend):
Parameters:
- files: list of event log files to parse
- str_regexes: regular expressions matched against the event data
- eid: event ID; when given, only events with this ID are searched
- input_timzone: timezone of the input logs
- output: output file name
- timestart, timeend: time range to search
"""
for file in files:
file = str(file)
print("Analyzing " + file)
file=str(file)
print("Analyzing "+file)
try:
parser = PyEvtxParser(file)
except:
print("Issue analyzing " + file + "\nplease check if its not corrupted")
print("Issue analyzing "+file +"\nplease check if its not corrupted")
continue
for record in parser.records():
try:
# extract the event ID
try:
for record in parser.records():
EventID = EventID_rex.findall(record['data'])
# when a time range was given, check that the event falls inside it
if timestart is not None and timeend is not None:
timestamp = datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat()))
if not (timestamp > timestart and timestamp < timeend):
continue # the event falls outside the time range; skip it
# when there is an EventID and it matches eid (when eid is given)
if len(EventID) > 0 and (eid is None or EventID[0] == eid):
return
if len(EventID) > 0:
if eid is not None and EventID[0]!=eid:
continue
Computer = Computer_rex.findall(record['data'])
Channel = Channel_rex.findall(record['data'])
channel = Channel[0] if len(Channel) > 0 else " "
# walk every supplied regular expression
if len(Channel)>0:
channel=Channel[0]
else:
channel=" "
#print(record['data'])
# if record['data'].lower().find(str_regex.lower())>-1:
#print(str_regexes)
for str_regex in str_regexes:
rex = re.compile(str_regex, re.IGNORECASE)
rex=re.compile(str_regex, re.IGNORECASE)
#print(rex)
#print(rex.findall(record['data']))
if rex.findall(record['data']):
# a regex matched; record the event details
#print("EventID : "+EventID[0]+" , Data : "+record['data'])
Hunting_events[0]['timestamp'].append(datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat())))
Hunting_events[0]['Date and Time'].append(parse(record["timestamp"]).astimezone(input_timzone).isoformat())
Hunting_events[0]['Channel'].append(channel)
Hunting_events[0]['Event ID'].append(EventID[0])
Hunting_events[0]['Computer'].append(Computer[0])
Hunting_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " ").replace("\n", " "))
except Exception as e:
print("issue searching log : " + record['data'] + "\n Error : " + str(e)) # 修正了错误的打印函数调用
except Exception as e:
print("issue searching log : "+record['data']+"\n Error : "+print(e))
hunt_report(output)
def hunt_report(output):
"""
生成猎取事件的报告
参数:
- output: 输出CSV文件的前缀
"""
global Hunting_events
Events = pd.DataFrame(Hunting_events[0])
print("Found " + str(len(Hunting_events[0]["timestamp"])) + " Events")
Events.to_csv(output + "_hunting.csv", index=False)
print("Found "+str(len(Hunting_events[0]["timestamp"]))+" Events")
Events.to_csv(output+"_hunting.csv", index=False)
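# --- illustrative sketch (not part of the original file) ---------------------
# A hypothetical call to Evtx_hunt matching the signature above: two
# expressions, any Event ID, no time window; hunt_report then writes
# out_hunting.csv. The file path is made up.
from pytz import timezone as _tz

Evtx_hunt(
    files=["C:/logs/Security.evtx"],
    str_regexes=["mimikatz", "lsass"],
    eid=None,
    input_timzone=_tz("UTC"),
    output="out",
    timestart=None,
    timeend=None,
)
# -----------------------------------------------------------------------------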

@ -7,23 +7,25 @@ import pandas as pd
import geoip2.database
import requests
from dateutil import parser, tz
import json
import csv
from pathlib import Path

# Globals used to time the analysis run
start_time = 0
end_time = 0

# SQL query to detect password spraying: many failed logins for one account
# from several distinct IPs within the same hour
password_spray_query = '''
WITH FailedLogins AS (
    SELECT
        UserId,
        ClientIP,
        datetime(CreationTime) AS LoginDate
    FROM
        events
    WHERE
        Operation = 'UserLoginFailed'
)
SELECT
    UserId,
@ -31,18 +33,18 @@ SELECT
    COUNT(DISTINCT ClientIP) AS UniqueIPCount,
    COUNT(*) AS FailedLoginAttempts,
    LoginDate
FROM
    FailedLogins
GROUP BY
    UserId,
    strftime('%Y-%m-%d %H', LoginDate)
HAVING
    COUNT(*) > 5 AND UniqueIPCount > 3
ORDER BY
    FailedLoginAttempts DESC;
'''
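As a sanity check on the thresholds (more than 5 failures from more than 3 distinct IPs within one hour), here is a minimal sketch against an in-memory SQLite database with synthetic rows; it assumes the full query string as defined above.

# Synthetic demo: 6 failed logins for one user from 6 IPs in the same hour
# should produce exactly one result row; fewer IPs or attempts produce none.
import sqlite3
import pandas as pd

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE events (UserId TEXT, ClientIP TEXT, CreationTime TEXT, Operation TEXT)")
rows = [('alice', f'10.0.0.{i}', f'2024-01-01 09:{i:02d}:00', 'UserLoginFailed')
        for i in range(6)]
conn.executemany("INSERT INTO events VALUES (?, ?, ?, ?)", rows)
print(pd.read_sql_query(password_spray_query, conn))
conn.close()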
# SQL query to track user logon activity per user and day
user_logon_query = '''
SELECT
    UserId,
@ -50,19 +52,18 @@ SELECT
    COUNT(*) AS TotalLoginAttempts,
    SUM(CASE WHEN Operation = 'UserLoggedIn' THEN 1 ELSE 0 END) AS SuccessfulLogins,
    SUM(CASE WHEN Operation = 'UserLoginFailed' THEN 1 ELSE 0 END) AS FailedLogins
FROM
    events
WHERE
    Operation = 'UserLoggedIn' OR Operation = 'UserLoginFailed'
GROUP BY
    UserId,
    LoginDate
ORDER BY
    LoginDate,
    UserId;
'''
# SQL query to count the distinct operations performed by each user
User_operations_query = '''
SELECT
    UserId,
@ -76,13 +77,12 @@ ORDER BY
OperationCount DESC;
'''
# SQL query to count user operations per day
user_operation_by_day_query = '''
SELECT
    UserId,
    DATE(CreationTime) AS OperationDate,
    COUNT(DISTINCT Operation) AS OperationCount,
    GROUP_CONCAT(Operation, ', ') AS UniqueOperations
FROM
    events
GROUP BY
@ -92,162 +92,138 @@ ORDER BY
OperationCount DESC
'''
def convert_csv(input_file, temp):
    """
    Convert the AuditData column of an Office 365 audit CSV export
    into a JSON Lines file.

    Parameters:
    - input_file: path of the input CSV file
    - temp: path of the temporary directory

    Returns:
    - json_file: path of the generated JSON Lines file
    """
    json_file = os.path.join(temp, 'audit_data.json')
    with open(input_file, 'r', encoding='utf-8') as csv_file, open(json_file, 'w', encoding='utf-8') as jsonl_file:
        # Create a CSV reader
        reader = csv.DictReader(csv_file)
        # Extract and write the AuditData column to the file as JSON Lines
        for row in reader:
            # AuditData is already a JSON formatted string; parse and re-serialize it
            json_data = json.loads(row['AuditData'])
            json_string = json.dumps(json_data)
            jsonl_file.write(json_string + '\n')
    return json_file
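To make the expected input and output concrete, a small self-contained sketch follows (synthetic one-row CSV; in a real export each AuditData cell holds one JSON record):

# Build a tiny synthetic audit CSV, convert it, and print the JSONL output.
import csv, json, os, tempfile

temp = tempfile.mkdtemp()
sample = os.path.join(temp, 'audit.csv')
with open(sample, 'w', newline='', encoding='utf-8') as f:
    writer = csv.DictWriter(f, fieldnames=['AuditData'])
    writer.writeheader()
    writer.writerow({'AuditData': json.dumps({'Operation': 'UserLoggedIn', 'UserId': 'alice'})})
print(open(convert_csv(sample, temp), encoding='utf-8').read())
# -> {"Operation": "UserLoggedIn", "UserId": "alice"}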
def flatten_json_file(input_file, timezone, chunk_size=10000):
    """
    Flatten a JSON Lines file and normalize the CreationTime timestamps.

    Parameters:
    - input_file: path of the input JSON Lines file
    - timezone: target timezone
    - chunk_size: number of records processed per chunk

    Returns:
    - DataFrame: the flattened data
    """
    # Read the JSON file in chunks
    chunks = []
    with open(input_file, 'r') as file:
        lines = file.readlines()
        for i in range(0, len(lines), chunk_size):
            chunk = [json.loads(line) for line in lines[i:i + chunk_size]]
            # Convert the CreationTime to the desired timezone
            for record in chunk:
                if 'CreationTime' in record:
                    creation_time = parser.parse(record['CreationTime'])
                    # Assume the original time is in UTC if no timezone info is present
                    if creation_time.tzinfo is None:
                        creation_time = creation_time.replace(tzinfo=tz.tzutc())
                    record['CreationTime'] = creation_time.astimezone(timezone).isoformat()
            chunks.append(pd.json_normalize(chunk))
    # Concatenate all chunks into a single DataFrame
    return pd.concat(chunks, ignore_index=True)
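The flattening step relies on pd.json_normalize, which turns nested keys into dotted column names; a one-record illustration:

# Nested AuditData fields become flat, dot-separated columns.
import pandas as pd

record = {'Operation': 'UserLoggedIn', 'Actor': {'ID': 'alice', 'Type': 5}}
print(pd.json_normalize([record]).columns.tolist())
# -> ['Operation', 'Actor.ID', 'Actor.Type']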
def create_sqlite_db_from_dataframe(dataframe, db_name):
    """
    Create a SQLite database from a Pandas DataFrame.

    Parameters:
    - dataframe: Pandas DataFrame holding the data
    - db_name: SQLite database file name
    """
    conn = sqlite3.connect(db_name)
    # Convert all columns to string and write the DataFrame to SQLite,
    # treating every field as TEXT
    dataframe = dataframe.astype(str)
    dataframe.to_sql('events', conn, if_exists='replace', index=False,
                     dtype={col_name: 'TEXT' for col_name in dataframe.columns})
    conn.close()
def read_detection_rules(rule_file):
    """
    Read the detection rules from a JSON file.

    Parameters:
    - rule_file: path of the JSON file holding the detection rules

    Returns:
    - rules: list of rules
    """
    with open(rule_file, 'r') as file:
        rules = json.load(file)
    return rules
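apply_detection_logic_sqlite (below) reads name, severity, and query from each entry, so a minimal rule file could look like the following sketch; the rule itself is illustrative, not one of the project's shipped rules.

# Write a hypothetical one-rule detection file in the expected shape.
import json

example_rules = [{
    "name": "Mass file download by a single user",   # illustrative rule
    "severity": "medium",
    "query": ("SELECT UserId, COUNT(*) AS Downloads FROM events "
              "WHERE Operation = 'FileDownloaded' "
              "GROUP BY UserId HAVING COUNT(*) > 100")
}]
with open('O365_detection_rules.json', 'w') as f:
    json.dump(example_rules, f, indent=2)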
def apply_detection_logic_sqlite(db_name, rules):
    """
    Apply the detection rules to the SQLite database.

    Parameters:
    - db_name: SQLite database file name
    - rules: list of detection rules

    Returns:
    - DataFrame: the detected events, tagged with rule name and severity
    """
    conn = sqlite3.connect(db_name)
    all_detected_events = []
    for rule in rules:
        rule_name = rule['name']
        severity = rule['severity']
        query = rule['query']
        detected_events = pd.read_sql_query(query, conn)
        detected_events['RuleName'] = rule_name
        detected_events['Severity'] = severity
        all_detected_events.append(detected_events)
    conn.close()
    return pd.concat(all_detected_events, ignore_index=True) if all_detected_events else pd.DataFrame()
def download_geolite_db(geolite_db_path):
    """
    Download the GeoLite2 database used for IP geolocation.

    Parameters:
    - geolite_db_path: path where the GeoLite2 database is saved
    """
    url = "https://git.io/GeoLite2-Country.mmdb"
    print(f"Downloading GeoLite2 database from {url}...")
    response = requests.get(url)
    response.raise_for_status()  # Check if the download was successful
    with open(geolite_db_path, 'wb') as file:
        file.write(response.content)
    print(f"GeoLite2 database downloaded and saved to {geolite_db_path}")
def get_country_from_ip(ip, reader):
    """
    Resolve an IP address to a country name.

    Parameters:
    - ip: IP address
    - reader: GeoLite2 database reader

    Returns:
    - str: country name, or 'Unknown' if the IP cannot be resolved
    """
    try:
        return reader.country(ip).country.name
    except Exception as e:
        #print(f"Could not resolve IP {ip}: {e}")
        return 'Unknown'
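A quick standalone sketch of the lookup (hypothetical IP; assumes the database file has already been downloaded locally):

# Resolve one IP with the module's helper; geoip2's Reader is a context manager.
import geoip2.database

with geoip2.database.Reader('GeoLite2-Country.mmdb') as reader:
    print(get_country_from_ip('8.8.8.8', reader))  # e.g. 'United States'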
def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data=False,
                  geolite_db_path='GeoLite2-Country.mmdb'):
    """
    Analyze Office 365 audit logs and generate a report.

    Parameters:
    - auditfile: path of the Office 365 audit log file
    - rule_file: path of the detection rules file
    - output: output directory
    - timezone: target timezone
    - include_flattened_data: whether to include the flattened data in the report
    - geolite_db_path: path of the GeoLite2 database file
    """
    global start_time, end_time
    start_time = time.time()
    temp_dir = ".temp"
    if output is None or output == "":
        output = os.path.splitext(auditfile)[0]
    try:
        # Create necessary directories
        os.makedirs(output, exist_ok=True)
        os.makedirs(temp_dir, exist_ok=True)
        # Check if the GeoLite2 database exists, and download it if not
        if not os.path.exists(geolite_db_path):
            download_geolite_db(geolite_db_path)
        # Convert the CSV export to JSON Lines
        json_file = convert_csv(auditfile, temp_dir)
        # Input and output file paths
        input_file = json_file
        db_name = os.path.join(temp_dir, 'audit_data.db')
@ -255,28 +231,36 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
rule_file = 'O365_detection_rules.json'
output_file = f"{output}_o365_report.xlsx"
        # Flatten the JSON file and normalize the timestamps
        flattened_df = flatten_json_file(input_file, timezone)

        # Create the SQLite database from the flattened DataFrame
        create_sqlite_db_from_dataframe(flattened_df, db_name)

        # Open the GeoLite2 database and resolve ClientIP to country names
        with geoip2.database.Reader(geolite_db_path) as reader:
            if 'ClientIP' in flattened_df.columns:
                flattened_df['Country'] = flattened_df['ClientIP'].apply(lambda ip: get_country_from_ip(ip, reader))

        # Read the detection rules and apply them using SQLite
        rules = read_detection_rules(rule_file)
        detected_events = apply_detection_logic_sqlite(db_name, rules)

        # Reorder columns to make RuleName and Severity the first columns
        if not detected_events.empty:
            columns = ['RuleName', 'Severity'] + [col for col in detected_events.columns if col not in ['RuleName', 'Severity']]
            detected_events = detected_events[columns]

        # Run the password-spray and user-activity queries
        conn = sqlite3.connect(db_name)
        try:
            user_login_tracker_df = pd.read_sql_query(user_logon_query, conn)
            password_spray_df = pd.read_sql_query(password_spray_query, conn)
@ -285,19 +269,20 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
finally:
conn.close()
        # Create a new workbook with the detection results
        with pd.ExcelWriter(output_file, engine='xlsxwriter') as writer:
            if include_flattened_data:
                # Split the flattened data into multiple sheets if needed
                max_rows_per_sheet = 65000
                num_sheets = len(flattened_df) // max_rows_per_sheet + 1
                for i in range(num_sheets):
                    start_row = i * max_rows_per_sheet
                    end_row = (i + 1) * max_rows_per_sheet
                    sheet_name = f'Flattened Data {i + 1}'
                    flattened_df.iloc[start_row:end_row].to_excel(writer, sheet_name=sheet_name, index=False)

            # Write the detection results and per-field statistics to separate sheets
            detected_events.to_excel(writer, sheet_name='Detection Results', index=False)
            user_login_tracker_df.to_excel(writer, sheet_name='User Login Tracker', index=False)
            password_spray_df.to_excel(writer, sheet_name='Password Spray Attacks', index=False)
@ -308,8 +293,10 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
            flattened_df['Country'].value_counts().to_frame().to_excel(writer, sheet_name='Country Stats')
            flattened_df['UserAgent'].value_counts().to_frame().to_excel(writer, sheet_name='UserAgent Stats')
            flattened_df['UserId'].value_counts().to_frame().to_excel(writer, sheet_name='UserId Stats')
            flattened_df['AuthenticationType'].value_counts().to_frame().to_excel(writer, sheet_name='AuthenticationType Stats')
# Measure the end time
end_time = time.time()
print(f"Office365 analysis finished in time: {end_time - start_time:.2f} seconds")
@ -317,12 +304,18 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
print(f"An error occurred during the analysis: {e}")
finally:
        # Clean up the temporary directory
        if os.path.exists(temp_dir):
            for file in Path(temp_dir).glob('*'):
                file.unlink()  # Delete the file
            os.rmdir(temp_dir)  # Remove the directory
        # Measure the end time and print the total running time
        end_time = time.time()
        running_time = end_time - start_time
        print(f"Office365 hunter finished in time: {running_time:.2f} seconds")

@ -493,115 +493,71 @@ def optimised_parse_mp(file):
'ParentUser': ['Event_EventData_ParentUser']}
parser = PyEvtxParser(str(file))
    for record in parser.records_json():
        # Flatten the JSON event data for easier field lookup
        data = flatten(json.loads(record["data"]))
        for key in mapping.keys():
            requiredfield = "None"
            # Use the first field in the mapping that exists in the record
            for field in mapping[key]:
                if field in data:
                    requiredfield = field
                    break
            if requiredfield != "None":
                # Join list values with commas; otherwise store the value as a string
                if isinstance(data[requiredfield], list):
                    Alldata[key].append(",".join(data[requiredfield]))
                else:
                    Alldata[key].append(str(data[requiredfield]))
            else:
                # No mapped field found: keep the raw event for the original log column
                if field == "Original_Event_Log":
                    Alldata[key].append(record["data"])
                else:
                    Alldata[key].append(None)
    # Lock to make the database insert safe across processes
    l.acquire()
    insert_into_db_mp(Alldata, DB)
    l.release()
    print("Done Parsing : " + str(file))
def clean(DBName):
    """
    Remove the temporary SQLite database file.

    Parameters:
    - DBName: SQLite database file name
    """
    file_path = DBName
    # Check if the file exists before removing it
    if os.path.isfile(file_path):
        os.remove(file_path)
        print("Temp Database has been removed.")
    else:
        print("Temp Database does not exist.")
def init(l):
    """
    Initialize the per-process global lock.

    Parameters:
    - l: lock object
    """
    global lock
    lock = l
def Sigma_Analyze(Path, rules, output, DBName="Events.sqlite"):
    """
    Analyze Windows event logs using Sigma rules.

    Parameters:
    - Path: path of the event log files
    - rules: path of the Sigma rules file
    - output: output file name prefix
    - DBName: SQLite database file name
    """
    global l, DBconn, DB
    tic_start = time.time()
    DB = DBName
    # Create the SQLite database
    Create_DB(DB)
    print("Analyzing logs using Sigma with below config : ")
    print(f"Logs Path : {Path}\nSigma Rules file : {rules}\nProfile : {output}")
    # Use a process pool to speed up parsing
    pool = multiprocessing.Pool(multiprocessing.cpu_count(), initializer=init, initargs=(l,))
    # Auto-detect the log files, then parse them in parallel
    files = auto_detect(Path)
    results = pool.map(optimised_parse_mp, files)
    # Insert the Sigma rules into the database and run the search
    RulesToDB(rules, DB)
    DBconn = sqlite3.connect(DB)
    optimised_search(DB, output)
    # Clean up the temporary database and close the connection
    clean(DBName)
    DBconn.close()
    toc_end = time.time()
    print("Analysis results available as CSV file with Name " + output + '_' + 'Detections.csv')
    print("Analysis results available as Excel file with statistics as " + output + '_' + 'Detections.xlsx')

@ -305,7 +305,6 @@
],
"level": "critical",
"rule": [
"SELECT * FROM Events WHERE (EventID IN ('17', '18') AND ((PipeName LIKE '%\\\\MSSE-%' ESCAPE '\\' AND PipeName LIKE '%-server%' ESCAPE '\\') OR PipeName LIKE '\\\\postex\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\status\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\msagent\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\mojo\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\interprocess\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\samr\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\netlogon\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\srvsvc\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\lsarpc\\_%' ESCAPE '\\' OR PipeName LIKE '\\\\wkssvc\\_%' ESCAPE '\\'))"
],
"filename": "pipe_created_mal_cobaltstrike.yml"
@ -328,7 +327,6 @@
],
"level": "critical",
"rule": [
"SELECT * FROM Events WHERE (EventID IN ('17', '18') AND (PipeName LIKE '%\\\\lsadump%' ESCAPE '\\' OR PipeName LIKE '%\\\\cachedump%' ESCAPE '\\' OR PipeName LIKE '%\\\\wceservicepipe%' ESCAPE '\\'))"
],
"filename": "pipe_created_cred_dump_tools_named_pipes.yml"
@ -349,16 +347,10 @@
],
"level": "low",
"rule": [
"SELECT * FROM Events WHERE (EventID IN ('17', '18') AND PipeName LIKE '\\\\PSEXESVC' ESCAPE '\\')"
],
"filename": "pipe_created_psexec_default_pipe.yml"
},
{
"title": "PAExec Default Named Pipe",
"id": "f6451de4-df0a-41fa-8d72-b39f54a08db5",
@ -39216,6 +39208,3 @@
"filename": "raw_access_thread_disk_access_using_illegitimate_tools.yml"
}
]
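To illustrate how these rule entries execute, here is a minimal sketch running one of the SQL strings above against a synthetic Events table (column set reduced to what the rule touches):

# One Sysmon pipe-created event (EventID 17) with PsExec's default pipe name.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE Events (EventID TEXT, PipeName TEXT)")
conn.execute("INSERT INTO Events VALUES ('17', '\\PSEXESVC')")
rule_sql = ("SELECT * FROM Events WHERE (EventID IN ('17', '18') "
            "AND PipeName LIKE '\\\\PSEXESVC' ESCAPE '\\')")
print(conn.execute(rule_sql).fetchall())  # -> [('17', '\\PSEXESVC')]
conn.close()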

[Five binary image files changed in this commit (10 KiB, 236 KiB, 86 KiB, 70 KiB, 222 KiB); contents not shown.]

@ -1,107 +1,101 @@
# Try to create a directory named "wineventlog"
try {
    New-Item -ItemType "directory" -Path "wineventlog"
}
catch {
    echo "can't create a new directory"
}
# Try to export the Security log as a CSV file
try {
    get-eventlog -log Security | export-csv wineventlog/Security.csv
}
catch {
    echo "Can't retrieve Security Logs"
}
# Try to export the System log as a CSV file
try {
    Get-WinEvent -LogName System | export-csv wineventlog/System.csv
}
catch {
    echo "Can't retrieve System Logs"
}
# Try to export the Application log as a CSV file
try {
    Get-WinEvent -LogName Application | export-csv wineventlog/Application.csv
}
catch {
    echo "Can't retrieve Application Logs"
}
# Try to export the Windows PowerShell log as a CSV file
try {
    Get-WinEvent -LogName "Windows PowerShell" | export-csv wineventlog/Windows_PowerShell.csv
}
catch {
    echo "Can't retrieve Windows PowerShell Logs"
}
# Try to export the Microsoft-Windows-TerminalServices-LocalSessionManager/Operational log as a CSV file
try {
    Get-WinEvent -LogName "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" | export-csv wineventlog/LocalSessionManager.csv
}
catch {
    echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
}
# Try to export the Microsoft-Windows-Windows Defender/Operational log as a CSV file
try {
    Get-WinEvent -LogName "Microsoft-Windows-Windows Defender/Operational" | export-csv wineventlog/Windows_Defender.csv
}
catch {
    echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
}
# Try to export the Microsoft-Windows-TaskScheduler/Operational log as a CSV file
try {
    Get-WinEvent -LogName Microsoft-Windows-TaskScheduler/Operational | export-csv wineventlog/TaskScheduler.csv
}
catch {
    echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
}
# Try to export the Microsoft-Windows-WinRM/Operational log as a CSV file
try {
    Get-WinEvent -LogName Microsoft-Windows-WinRM/Operational | export-csv wineventlog/WinRM.csv
}
catch {
    echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
}
# Try to export the Microsoft-Windows-Sysmon/Operational log as a CSV file
try {
    Get-WinEvent -LogName Microsoft-Windows-Sysmon/Operational | export-csv wineventlog/Sysmon.csv
}
catch {
    echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
}
# Try to export the Microsoft-Windows-PowerShell/Operational log as a CSV file
try {
    Get-WinEvent -LogName Microsoft-Windows-PowerShell/Operational | export-csv wineventlog/Powershell_Operational.csv
}
catch {
    echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
}
# Try to compress the "wineventlog" directory into logs.zip
try {
    Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
}
catch {
    echo "couldn't compress the log folder"
}

@ -1,107 +1,101 @@
# Try to create a directory named "wineventlog"
try {
    New-Item -ItemType "directory" -Path "wineventlog"
}
catch {
    echo "can't create a new directory"
}
# Try to export the Security log to an EVTX file
try {
    wevtutil epl Security wineventlog/Security.evtx
}
catch {
    echo "Can't retrieve Security Logs"
}
# Try to export the System log to an EVTX file
try {
    wevtutil epl System wineventlog/System.evtx
}
catch {
    echo "Can't retrieve System Logs"
}
# Try to export the Application log to an EVTX file
try {
    wevtutil epl Application wineventlog/Application.evtx
}
catch {
    echo "Can't retrieve Application Logs"
}
# Try to export the Windows PowerShell log to an EVTX file
try {
    wevtutil epl "Windows PowerShell" wineventlog/Windows_PowerShell.evtx
}
catch {
    echo "Can't retrieve Windows PowerShell Logs"
}
# Try to export the Microsoft-Windows-TerminalServices-LocalSessionManager/Operational log to an EVTX file
try {
    wevtutil epl "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" wineventlog/LocalSessionManager.evtx
}
catch {
    echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
}
# Try to export the Microsoft-Windows-Windows Defender/Operational log to an EVTX file
try {
    wevtutil epl "Microsoft-Windows-Windows Defender/Operational" wineventlog/Windows_Defender.evtx
}
catch {
    echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
}
# Try to export the Microsoft-Windows-TaskScheduler/Operational log to an EVTX file
try {
    wevtutil epl Microsoft-Windows-TaskScheduler/Operational wineventlog/TaskScheduler.evtx
}
catch {
    echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
}
# Try to export the Microsoft-Windows-WinRM/Operational log to an EVTX file
try {
    wevtutil epl Microsoft-Windows-WinRM/Operational wineventlog/WinRM.evtx
}
catch {
    echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
}
# Try to export the Microsoft-Windows-Sysmon/Operational log to an EVTX file
try {
    wevtutil epl Microsoft-Windows-Sysmon/Operational wineventlog/Sysmon.evtx
}
catch {
    echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
}
# Try to export the Microsoft-Windows-PowerShell/Operational log to an EVTX file
try {
    wevtutil epl Microsoft-Windows-PowerShell/Operational wineventlog/Powershell_Operational.evtx
}
catch {
    echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
}
# Try to compress the "wineventlog" directory into logs.zip
try {
    Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
}
catch {
    echo "couldn't compress the log folder"
}
