2024.12.17 second revision

AizenSousuke
DengYanjia 8 months ago
parent 51da9a2377
commit 602d857b22

@@ -493,71 +493,115 @@ def optimised_parse_mp(file):
    'ParentUser': ['Event_EventData_ParentUser']}
    parser = PyEvtxParser(str(file))
    for record in parser.records_json():
        # Flatten the JSON event data to make downstream lookups simple
        data = flatten(json.loads(record["data"]))
        for key in mapping.keys():
            requiredfield = "None"
            # Walk the candidate fields for this key and take the first one present in the record
            for field in mapping[key]:
                if field in data:
                    requiredfield = field
                    break
            if requiredfield != "None":
                # A list value is joined with commas before being stored
                if isinstance(data[requiredfield], list):
                    Alldata[key].append(",".join(data[requiredfield]))
                else:
                    # Any other value is stored as a plain string
                    Alldata[key].append(str(data[requiredfield]))
            else:
                # No candidate field matched this record
                if field == "Original_Event_Log":
                    # For the original event log, keep the full raw record payload
                    Alldata[key].append(record["data"])
                else:
                    # For every other missing field, append None
                    Alldata[key].append(None)
    #print("finished Parsing")
    #print(Alldata)
    # Hold the shared lock so inserts from multiple worker processes cannot interleave
    l.acquire()
    #print("Inserting data into " + DB)
    insert_into_db_mp(Alldata, DB)
    l.release()
    print("Done Parsing : " + str(file))
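
# For reference, a minimal sketch of the flatten-then-first-match lookup used in
# optimised_parse_mp above. _flatten_sketch and _sample are illustrative stand-ins;
# the real code relies on the flatten() helper imported elsewhere in this module.
def _flatten_sketch(d, parent_key="", sep="_"):
    # Recursively collapse nested dicts into "Parent_Child_Leaf" style keys
    items = {}
    for k, v in d.items():
        new_key = parent_key + sep + k if parent_key else k
        if isinstance(v, dict):
            items.update(_flatten_sketch(v, new_key, sep))
        else:
            items[new_key] = v
    return items

# _sample mimics one parsed EVTX record; the first candidate field found wins
_sample = {"Event": {"EventData": {"ParentUser": "CORP\\alice"}}}
_flat = _flatten_sketch(_sample)
for _field in ['Event_EventData_ParentUser']:
    if _field in _flat:
        # prints: Event_EventData_ParentUser -> CORP\alice
        print(_field, "->", _flat[_field])
        break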
def clean(DBName):
    """
    Remove the temporary SQLite database file.
    Args:
    - DBName: SQLite database file name
    """
    file_path = DBName
    # Check whether the file exists before attempting removal
    if os.path.isfile(file_path):
        # Remove the file
        os.remove(file_path)
        print("Temp Database has been removed.")
    else:
        print("Temp Database does not exist.")
def init(l_):
    """
    Initialise the lock shared by the pool's worker processes.
    Args:
    - l_: lock object passed through the pool's initargs
    """
    # Bind the shared lock to the global name the workers actually use
    global l
    l = l_
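
# Note: multiprocessing.Pool runs the initializer exactly once per worker process,
# so the lock passed through initargs is the same shared object in every worker.
# A minimal sketch of that wiring (shared_lock is illustrative; Sigma_Analyze
# below passes the module-level lock l the same way):
#
#     shared_lock = multiprocessing.Lock()
#     pool = multiprocessing.Pool(initializer=init, initargs=(shared_lock,))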
def Sigma_Analyze(Path, rules, output, DBName="Events.sqlite"):
    """
    Analyze Windows event logs with Sigma rules.
    Args:
    - Path: path to the event log files
    - rules: path to the Sigma rules file
    - output: output file name prefix
    - DBName: SQLite database file name
    """
    global l, DBconn, DB
    tic_start = time.time()
    DB = DBName
    # Create the SQLite database
    Create_DB(DB)
    print("Analyzing logs using Sigma with below config : ")
    print(f"Logs Path : {Path}\nSigma Rules file : {rules}\nProfile : {output}")
    # Use a process pool to speed up parsing
    pool = multiprocessing.Pool(multiprocessing.cpu_count(), initializer=init, initargs=(l,))
    # Auto-detect the log files to process
    files = auto_detect(Path)
    # Parse the log files across the worker processes
    results = pool.map(optimised_parse_mp, files)
    # Insert the Sigma rules into the database
    RulesToDB(rules, DB)
    # Connect to the database
    DBconn = sqlite3.connect(DB)
    # Run the optimised search that applies the Sigma rules
    optimised_search(DB, output)
    # Remove the temporary database
    clean(DBName)
    # Close the database connection
    DBconn.close()
    toc_end = time.time()
    # Report the output file names
    print("Analysis results available as CSV file with Name " + output + '_' + 'Detections.csv')
    print("Analysis results available as Excel file with statistics as " + output + '_' + 'Detections.xlsx')
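
# Example invocation, a sketch with hypothetical paths and profile prefix; the
# __main__ guard matters because multiprocessing re-imports this module in
# spawned workers on platforms such as Windows:
if __name__ == "__main__":
    Sigma_Analyze("./Logs", "sigma_rules.json", "Case1", DBName="Events.sqlite")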
