diff --git a/src/lib/EvtxHunt.py b/src/lib/EvtxHunt.py
index 54c242e..c3eebc3 100644
--- a/src/lib/EvtxHunt.py
+++ b/src/lib/EvtxHunt.py
@@ -1,72 +1,75 @@
 import csv
 import re
-from netaddr import *
-import xml.etree.ElementTree as ET
-import pandas as pd
-from datetime import datetime , timezone
-from evtx import PyEvtxParser
-from dateutil.parser import parse
-from dateutil.parser import isoparse
-from pytz import timezone
-minlength=1000
-
-Hunting_events=[{'Date and Time':[],'timestamp':[],'Channel':[],'Computer':[],'Event ID':[],'Original Event Log':[]}]
-
+from netaddr import *  # import everything from netaddr for network-address handling
+import xml.etree.ElementTree as ET  # XML parsing
+import pandas as pd  # data analysis library
+from datetime import datetime, timezone  # date and time handling
+from evtx import PyEvtxParser  # parser for Windows EVTX event log files
+from dateutil.parser import parse, isoparse  # parse date/time strings
+from pytz import timezone  # timezone handling
+minlength = 1000  # presumably meant for a string-length check, but unused in this file
+# List holding the dict that accumulates the hunted events
+Hunting_events = [{'Date and Time': [], 'timestamp': [], 'Channel': [], 'Computer': [], 'Event ID': [], 'Original Event Log': []}]
+# Regular expressions used to extract specific fields from the event XML
 EventID_rex = re.compile('(.*)<\/EventID>', re.IGNORECASE)
 Channel_rex = re.compile('(.*)<\/Channel>', re.IGNORECASE)
 Computer_rex = re.compile('(.*)<\/Computer>', re.IGNORECASE)
-def Evtx_hunt(files,str_regexes,eid,input_timzone,output,timestart,timeend):
+def Evtx_hunt(files, str_regexes, eid, input_timzone, output, timestart, timeend):
+    """
+    Parse Windows event log files and hunt for matching events.
+
+    Parameters:
+    - files: list of event log files to parse
+    - str_regexes: list of regular expressions matched against the event data
+    - eid: event ID; when provided, only events with this ID are searched
+    - input_timzone: timezone of the input logs
+    - output: output file name prefix
+    - timestart, timeend: time range to search
+    """
     for file in files:
-        file=str(file)
-        print("Analyzing "+file)
+        file = str(file)
+        print("Analyzing " + file)
         try:
             parser = PyEvtxParser(file)
         except:
-            print("Issue analyzing "+file +"\nplease check if its not corrupted")
+            print("Issue analyzing " + file + "\nplease check that it is not corrupted")
             continue
-        try:
-
-            for record in parser.records():
-
+
+        for record in parser.records():
+            try:
+                # Extract the event ID
                 EventID = EventID_rex.findall(record['data'])
-
+                # If a time range was supplied, check whether the event falls inside it
                 if timestart is not None and timeend is not None:
                     timestamp = datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat()))
                     if not (timestamp > timestart and timestamp < timeend):
-                        return
-                if len(EventID) > 0:
-                    if eid is not None and EventID[0]!=eid:
-                        continue
-
+                        continue  # event outside the time range, skip it
+                # Proceed only if an EventID was found and it matches eid (when eid is given)
+                if len(EventID) > 0 and (eid is None or EventID[0] == eid):
                     Computer = Computer_rex.findall(record['data'])
                     Channel = Channel_rex.findall(record['data'])
-                    if len(Channel)>0:
-                        channel=Channel[0]
-                    else:
-                        channel=" "
-                    #print(record['data'])
-                    # if record['data'].lower().find(str_regex.lower())>-1:
-                    #print(str_regexes)
+                    channel = Channel[0] if len(Channel) > 0 else " "
+                    # Try every supplied regular expression against the record
                    for str_regex in str_regexes:
-                        rex=re.compile(str_regex, re.IGNORECASE)
-                        #print(rex)
-                        #print(rex.findall(record['data']))
+                        rex = re.compile(str_regex, re.IGNORECASE)
                        if rex.findall(record['data']):
-                            #print("EventID : "+EventID[0]+" , Data : "+record['data'])
+                            # The regex matched, so record the event details
                            Hunting_events[0]['timestamp'].append(datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat())))
                            Hunting_events[0]['Date and Time'].append(parse(record["timestamp"]).astimezone(input_timzone).isoformat())
                            Hunting_events[0]['Channel'].append(channel)
                            Hunting_events[0]['Event ID'].append(EventID[0])
                            Hunting_events[0]['Computer'].append(Computer[0])
                            Hunting_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " ").replace("\n", " "))
-        except Exception as e:
-            print("issue searching log : "+record['data']+"\n Error : "+print(e))
+            except Exception as e:
+                print("issue searching log : " + record['data'] + "\n Error : " + str(e))  # fixed the erroneous nested print() call
     hunt_report(output)
-
-
 def hunt_report(output):
+    """
+    Generate the report of hunted events.
+
+    Parameters:
+    - output: prefix of the output CSV file
+    """
     global Hunting_events
     Events = pd.DataFrame(Hunting_events[0])
-    print("Found "+str(len(Hunting_events[0]["timestamp"]))+" Events")
-    Events.to_csv(output+"_hunting.csv", index=False)
+    print("Found " + str(len(Hunting_events[0]["timestamp"])) + " Events")
+    Events.to_csv(output + "_hunting.csv", index=False)
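For reviewers who want to exercise the refactored hunt loop, here is a minimal, hypothetical driver; the log folder, regex patterns, and output prefix are placeholders and not part of this patch, and it assumes src/lib/EvtxHunt.py is importable as EvtxHunt.

# Hypothetical driver for the refactored Evtx_hunt/hunt_report pair (not part of the patch)
from pathlib import Path
from pytz import timezone

from EvtxHunt import Evtx_hunt  # assumes src/lib is on sys.path

evtx_files = [str(p) for p in Path("./logs").glob("*.evtx")]  # placeholder log folder
patterns = [r"mimikatz", r"\\Temp\\.*\.exe"]                  # placeholder hunt regexes
# eid=None searches every event ID; timestart/timeend=None disables the time filter.
Evtx_hunt(evtx_files, patterns, eid=None, input_timzone=timezone("UTC"),
          output="case01", timestart=None, timeend=None)
# Evtx_hunt calls hunt_report() itself, which writes case01_hunting.csv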
diff --git a/src/lib/O365Hunter.py b/src/lib/O365Hunter.py
index 7df256a..c842205 100644
--- a/src/lib/O365Hunter.py
+++ b/src/lib/O365Hunter.py
@@ -7,25 +7,23 @@ import pandas as pd
 import geoip2.database
 import requests
 from dateutil import parser, tz
-import pandas as pd
-import json
-import csv
 from pathlib import Path
-start_time=0
-end_time=0
+# Global variables used for timing the run
+start_time = 0
+end_time = 0
+
+# SQL query used to detect password-spray attacks
 password_spray_query = '''
 WITH FailedLogins AS (
-
     SELECT
         UserId,
-        ClientIP,
+    ClientIP,
         datetime(CreationTime) AS LoginDate
     FROM
         events
     WHERE
         Operation = 'UserLoginFailed'
-
 )
 SELECT
     UserId,
@@ -33,18 +31,18 @@ SELECT
     COUNT(DISTINCT ClientIP) AS UniqueIPCount,
     COUNT(*) AS FailedLoginAttempts,
     LoginDate
-
 FROM
     FailedLogins
 GROUP BY
     UserId,
-    strftime('%Y-%m-%d %H', LoginDate)
+    strftime('%Y-%m-%d %H', LoginDate)
 HAVING
     COUNT(*) > 5 AND UniqueIPCount > 3
 ORDER BY
     FailedLoginAttempts DESC;
-
-    '''
+'''
+# SQL query used to track user logon activity
 user_logon_query = '''
 SELECT
     UserId,
@@ -52,18 +50,19 @@ SELECT
     COUNT(*) AS TotalLoginAttempts,
     SUM(CASE WHEN Operation = 'UserLoggedIn' THEN 1 ELSE 0 END) AS SuccessfulLogins,
     SUM(CASE WHEN Operation = 'UserLoginFailed' THEN 1 ELSE 0 END) AS FailedLogins
-    FROM
-        events
-    where
+FROM
+    events
+WHERE
     Operation = 'UserLoggedIn' OR Operation = 'UserLoginFailed'
-    GROUP BY
-        UserId,
-        LoginDate
-    ORDER BY
-        LoginDate,
-        UserId;
+GROUP BY
+    UserId,
+    LoginDate
+ORDER BY
+    LoginDate,
+    UserId;
 '''
+# SQL query used to count the operations performed by each user
 User_operations_query = '''
 SELECT
     UserId,
@@ -77,12 +76,13 @@ ORDER BY
     OperationCount DESC;
 '''
+# SQL query used to summarise user operations per day
 user_operation_by_day_query = '''
 SELECT
     UserId,
     DATE(CreationTime) AS OperationDate,
     COUNT(DISTINCT Operation) AS OperationCount,
-    GROUP_CONCAT( Operation, ', ') AS UniqueOperations
+    GROUP_CONCAT(Operation, ', ') AS UniqueOperations
 FROM
     events
 GROUP BY
@@ -92,138 +92,162 @@ ORDER BY
     OperationCount DESC
 '''
-def convert_csv(input_file,temp):
-    with open(input_file, 'r', encoding='utf-8') as csv_file:
-        # Create a CSV reader
-        reader = csv.DictReader(csv_file)
-
-        json_file = 'audit_data.json'
-        json_file=os.path.join(temp, json_file)
-        with open(json_file, 'w', encoding='utf-8') as jsonl_file:
-            # Extract and write the AuditData column to a file as JSON Lines
-            for row in reader:
-                # Extract the AuditData which is already a JSON formatted string
-                json_data = json.loads(row['AuditData'])
-                # Convert the JSON object back to a string to store in the file
-                json_string = json.dumps(json_data)
-                # Write the JSON string to the file with a newline
-                jsonl_file.write(json_string + '\n')
+def convert_csv(input_file, temp):
+    """
+    Convert the CSV audit export into a JSON Lines file.
+
+    Parameters:
+    - input_file: path of the input CSV file
+    - temp: path of the temporary directory
+
+    Returns:
+    - json_file: path of the generated JSON file
+    """
+    json_file = os.path.join(temp, 'audit_data.json')
+    with open(input_file, 'r', encoding='utf-8') as csv_file, open(json_file, 'w', encoding='utf-8') as jsonl_file:
+        reader = csv.DictReader(csv_file)
+        for row in reader:
+            json_data = json.loads(row['AuditData'])
+            json_string = json.dumps(json_data)
+            jsonl_file.write(json_string + '\n')
     return json_file
-
 def flatten_json_file(input_file, timezone, chunk_size=10000):
-    # Read the JSON file in chunks
+    """
+    Flatten the JSON Lines file and normalise the timestamps.
+
+    Parameters:
+    - input_file: path of the input JSON file
+    - timezone: target timezone
+    - chunk_size: number of lines processed per chunk
+
+    Returns:
+    - DataFrame: the flattened data
+    """
     chunks = []
     with open(input_file, 'r') as file:
         lines = file.readlines()
         for i in range(0, len(lines), chunk_size):
             chunk = [json.loads(line) for line in lines[i:i + chunk_size]]
-
-            # Convert the CreationTime to the desired timezone
             for record in chunk:
                 if 'CreationTime' in record:
-                    # Parse the CreationTime
                     creation_time = parser.parse(record['CreationTime'])
-
-                    # Check if the datetime object is timezone aware
                     if creation_time.tzinfo is None:
-                        # Assume the original time is in UTC if no timezone info is present
                         creation_time = creation_time.replace(tzinfo=tz.tzutc())
-
-                    # Convert the CreationTime to the desired timezone
                     record['CreationTime'] = creation_time.astimezone(timezone).isoformat()
-
             chunks.append(pd.json_normalize(chunk))
-
-    # Concatenate all chunks into a single DataFrame
-    flattened_records = pd.concat(chunks, ignore_index=True)
-
-    return flattened_records
+    return pd.concat(chunks, ignore_index=True)
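To make the CreationTime handling in flatten_json_file easier to review, here is a small standalone sketch of the same parse / assume-UTC / convert steps on one synthetic record; the field values and target timezone are illustrative only, not taken from a real audit log.

# Standalone sketch (not part of the patch) of the timestamp normalisation performed above
import json
import pandas as pd
from dateutil import parser, tz

line = json.dumps({"CreationTime": "2024-01-01T10:00:00",   # naive timestamp, as in the UAL export
                   "Operation": "UserLoggedIn",
                   "UserId": "alice@example.com"})
record = json.loads(line)
creation_time = parser.parse(record["CreationTime"])
if creation_time.tzinfo is None:                            # assume UTC when no offset is present
    creation_time = creation_time.replace(tzinfo=tz.tzutc())
record["CreationTime"] = creation_time.astimezone(tz.gettz("Europe/Berlin")).isoformat()
print(pd.json_normalize([record])["CreationTime"].iloc[0])
# 2024-01-01T11:00:00+01:00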
 def create_sqlite_db_from_dataframe(dataframe, db_name):
-    conn = sqlite3.connect(db_name)
-
-    # Convert all columns to string
+    """
+    Create a SQLite database from a pandas DataFrame.
+
+    Parameters:
+    - dataframe: pandas DataFrame containing the data
+    - db_name: SQLite database file name
+    """
+    conn = sqlite3.connect(db_name)
     dataframe = dataframe.astype(str)
-
-    # Write the DataFrame to SQLite, treating all fields as text
     dataframe.to_sql('events', conn, if_exists='replace', index=False, dtype={col_name: 'TEXT' for col_name in dataframe.columns})
-
     conn.close()
-
 def read_detection_rules(rule_file):
-    with open(rule_file, 'r') as file:
-        rules = json.load(file)
-    return rules
+    """
+    Read the detection rules from a file.
+
+    Parameters:
+    - rule_file: path of the JSON file containing the detection rules
+
+    Returns:
+    - rules: list of rules
+    """
+    with open(rule_file, 'r') as file:
+        return json.load(file)
 
 def apply_detection_logic_sqlite(db_name, rules):
+    """
+    Apply the detection logic to the SQLite database.
+
+    Parameters:
+    - db_name: SQLite database file name
+    - rules: list of detection rules
+
+    Returns:
+    - DataFrame: the detected suspicious events
+    """
     conn = sqlite3.connect(db_name)
     all_detected_events = []
-
     for rule in rules:
         rule_name = rule['name']
         severity = rule['severity']
         query = rule['query']
-
         detected_events = pd.read_sql_query(query, conn)
         detected_events['RuleName'] = rule_name
         detected_events['Severity'] = severity
-
         all_detected_events.append(detected_events)
-
     conn.close()
-
-    if all_detected_events:
-        result = pd.concat(all_detected_events, ignore_index=True)
-    else:
-        result = pd.DataFrame()
-
-    return result
+    return pd.concat(all_detected_events, ignore_index=True) if all_detected_events else pd.DataFrame()
 
 def download_geolite_db(geolite_db_path):
+    """
+    Download the GeoLite2 database used for IP geolocation.
+
+    Parameters:
+    - geolite_db_path: path where the GeoLite2 database is saved
+    """
     url = "https://git.io/GeoLite2-Country.mmdb"
     print(f"Downloading GeoLite2 database from {url}...")
     response = requests.get(url)
-    response.raise_for_status()  # Check if the download was successful
-
+    response.raise_for_status()
     with open(geolite_db_path, 'wb') as file:
         file.write(response.content)
     print(f"GeoLite2 database downloaded and saved to {geolite_db_path}")
 
 def get_country_from_ip(ip, reader):
+    """
+    Resolve an IP address to a country name.
+
+    Parameters:
+    - ip: the IP address
+    - reader: GeoLite2 database reader
+
+    Returns:
+    - str: the country name, or 'Unknown' if the IP cannot be resolved
+    """
     try:
-        response = reader.country(ip)
-        return response.country.name
+        return reader.country(ip).country.name
     except Exception as e:
-        #print(f"Could not resolve IP {ip}: {e}")
+        print(f"Could not resolve IP {ip}: {e}")
         return 'Unknown'
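Since read_detection_rules and apply_detection_logic_sqlite only consume the 'name', 'severity' and 'query' keys, reviewers can test them with a minimal hand-written rule file; the rule below is hypothetical and not taken from O365_detection_rules.json.

# Minimal sketch of the rule shape consumed by read_detection_rules / apply_detection_logic_sqlite
import json

example_rules = [
    {
        "name": "Mass file downloads by a single user",   # hypothetical rule name
        "severity": "Medium",
        "query": "SELECT UserId, COUNT(*) AS Downloads "
                 "FROM events WHERE Operation = 'FileDownloaded' "
                 "GROUP BY UserId HAVING COUNT(*) > 100",
    }
]

with open("my_rules.json", "w") as fh:                     # hypothetical file name
    json.dump(example_rules, fh, indent=2)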
-
 def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data=False, geolite_db_path='GeoLite2-Country.mmdb'):
+    """
+    Analyze an Office 365 audit log and generate a report.
+
+    Parameters:
+    - auditfile: path of the Office 365 audit log file
+    - rule_file: path of the detection rules file
+    - output: output directory
+    - timezone: target timezone
+    - include_flattened_data: whether to include the flattened data in the report
+    - geolite_db_path: path of the GeoLite2 database file
+    """
+    global start_time, end_time
     start_time = time.time()
     temp_dir = ".temp"
     if output is None or output == "":
         output = os.path.splitext(auditfile)[0]
+
     try:
-        # Create necessary directories
         os.makedirs(output, exist_ok=True)
         os.makedirs(temp_dir, exist_ok=True)
 
-        # Check if the GeoLite2 database exists, and download it if not
         if not os.path.exists(geolite_db_path):
             download_geolite_db(geolite_db_path)
 
-        # Convert CSV to JSON (assuming convert_csv is a valid function that you have)
         json_file = convert_csv(auditfile, temp_dir)
-
-        # Input and output file paths
         input_file = json_file
         db_name = os.path.join(temp_dir, 'audit_data.db')
@@ -231,36 +255,28 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
         rule_file = 'O365_detection_rules.json'
         output_file = f"{output}_o365_report.xlsx"
 
-        # Measure the start time
-
-
-        # Flatten the JSON file
+        # Flatten the JSON data and normalise the timestamps
         flattened_df = flatten_json_file(input_file, timezone)
 
-        # Create SQLite database from the flattened DataFrame
+        # Create the SQLite database
         create_sqlite_db_from_dataframe(flattened_df, db_name)
 
-        # Open the GeoLite2 database
+        # Resolve IP addresses to countries with the GeoLite2 database
         with geoip2.database.Reader(geolite_db_path) as reader:
-            # Resolve ClientIP to country names
             if 'ClientIP' in flattened_df.columns:
                 flattened_df['Country'] = flattened_df['ClientIP'].apply(lambda ip: get_country_from_ip(ip, reader))
 
-        # Read detection rules
+        # Read the detection rules and apply them
         rules = read_detection_rules(rule_file)
-
-        # Apply detection logic using SQLite
         detected_events = apply_detection_logic_sqlite(db_name, rules)
 
-        # Reorder columns to make RuleName the first column
+        # Reorder the DataFrame columns so that RuleName comes first
         if not detected_events.empty:
-            columns = ['RuleName', 'Severity'] + [col for col in detected_events.columns if
-                                                  col not in ['RuleName', 'Severity']]
+            columns = ['RuleName', 'Severity'] + [col for col in detected_events.columns if col not in ['RuleName', 'Severity']]
             detected_events = detected_events[columns]
 
-        # Perform the brute-force detection query
+        # Run the remaining SQL queries
         conn = sqlite3.connect(db_name)
-
         try:
             user_login_tracker_df = pd.read_sql_query(user_logon_query, conn)
             password_spray_df = pd.read_sql_query(password_spray_query, conn)
@@ -269,20 +285,19 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
         finally:
             conn.close()
 
-        # Create a new workbook with the detection results
+        # Generate the Excel report
         with pd.ExcelWriter(output_file, engine='xlsxwriter') as writer:
             if include_flattened_data:
-                # Split the flattened data into multiple sheets if needed
+                # Split the flattened data across multiple sheets if needed
                 max_rows_per_sheet = 65000
                 num_sheets = len(flattened_df) // max_rows_per_sheet + 1
-
                 for i in range(num_sheets):
                     start_row = i * max_rows_per_sheet
                     end_row = (i + 1) * max_rows_per_sheet
                     sheet_name = f'Flattened Data {i + 1}'
                     flattened_df.iloc[start_row:end_row].to_excel(writer, sheet_name=sheet_name, index=False)
 
-            # Write statistics for various fields
+            # Write the various statistics to separate sheets
             detected_events.to_excel(writer, sheet_name='Detection Results', index=False)
             user_login_tracker_df.to_excel(writer, sheet_name='User Login Tracker', index=False)
             password_spray_df.to_excel(writer, sheet_name='Password Spray Attacks', index=False)
@@ -293,10 +308,8 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
             flattened_df['Country'].value_counts().to_frame().to_excel(writer, sheet_name='Country Stats')
             flattened_df['UserAgent'].value_counts().to_frame().to_excel(writer, sheet_name='UserAgent Stats')
             flattened_df['UserId'].value_counts().to_frame().to_excel(writer, sheet_name='UserId Stats')
-            flattened_df['AuthenticationType'].value_counts().to_frame().to_excel(writer,
-                                                                                  sheet_name='AuthenticationType Stats')
+            flattened_df['AuthenticationType'].value_counts().to_frame().to_excel(writer, sheet_name='AuthenticationType Stats')
 
-        # Measure the end time
         end_time = time.time()
         print(f"Office365 analysis finished in time: {end_time - start_time:.2f} seconds")
 
@@ -304,18 +317,12 @@ def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data
         print(f"An error occurred during the analysis: {e}")
 
     finally:
-        #Clean up the temporary directory
+        # Clean up the temporary directory
         if os.path.exists(temp_dir):
             for file in Path(temp_dir).glob('*'):
-                file.unlink()  # Delete the file
-            os.rmdir(temp_dir)  # Remove the directory
+                file.unlink()
+            os.rmdir(temp_dir)
 
-
-    # Write the User Login Tracker results to a new sheet
-
-    # Measure the end time
     end_time = time.time()
-
-    # Calculate and print the running time
     running_time = end_time - start_time
     print(f"Office365 hunter finished in time: {running_time:.2f} seconds")
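A hypothetical end-to-end invocation of the reworked analyzeoff365, for reviewers who want to reproduce the report locally; the file names and timezone are placeholders, and the module is assumed to be importable.

# Hypothetical driver for analyzeoff365 (not part of the patch)
from dateutil import tz

from O365Hunter import analyzeoff365  # assumes src/lib is on sys.path

analyzeoff365(
    auditfile="unified_audit_log.csv",        # placeholder: unified audit log export in CSV form
    rule_file="O365_detection_rules.json",    # rule file referenced by the code above
    output="case01",
    timezone=tz.gettz("UTC"),
    include_flattened_data=False,
)
# Writes case01_o365_report.xlsx with the Detection Results, User Login Tracker,
# Password Spray Attacks and statistics sheets produced above.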
diff --git a/src/lib/SigmaHunter.py b/src/lib/SigmaHunter.py
index 9114758..48a7e2d 100644
--- a/src/lib/SigmaHunter.py
+++ b/src/lib/SigmaHunter.py
@@ -1,28 +1,32 @@
-from evtx import PyEvtxParser
-import glob
+# Imports
+from evtx import PyEvtxParser  # parses Windows event logs
+import glob  # file path pattern matching
 import os
-import re
-from pathlib import Path as libPath
-import pandas as pd
+import re  # regular expressions for matching, searching and replacing strings
+from pathlib import Path as libPath  # object-oriented handling of filesystem paths
+import pandas as pd  # data analysis library providing the DataFrame and Series structures
 import json
-import sqlite3
-from flatten_json import flatten
+import sqlite3  # Python's built-in SQLite interface
+from flatten_json import flatten  # flattens nested JSON data
 import time
-import multiprocessing
-
+import multiprocessing  # multi-process parallelism
+# Dictionary that accumulates all event data
 Alldata={'Original_Event_Log':[],'TargetObject': [], 'Channel': [], 'Computer': [], 'Correlation': [], 'EventID': [], 'EventRecordID': [], 'ProcessID': [], 'ThreadID': [], 'Keywords': [], 'Level': [], 'Opcode': [], 'Guid': [], 'Name': [], 'UserID': [], 'Task': [], 'SystemTime': [], 'Version': [], 'Status': [], 'ActivityID': [], 'Context': [], 'ErrorCode': [], 'AppId': [], 'DCName': [], 'Binary': [], 'Qualifiers': [], 'Security': [], 'Path': [], 'ScriptBlockText': [], 'param1': [], 'param2': [], 'ContextInfo': [], 'Payload': [], 'UserData': [], 'State': [], 'EventType': [], 'AccountName': [], 'ProcessName': [], 'LogonType': [], 'TaskName': [], 'Message': [], 'Provider': [], 'updateGuid': [], 'updateRevisionNumber': [], 'updateTitle': [], 'DeviceName': [], 'DeviceNameLength': [], 'ClientProcessId': [], 'PossibleCause': [], 'User': [], 'ProviderName': [], 'Query': [], 'value': [], 'Action': [], 'ApplicationPath': [], 'ModifyingApplication': [], 'Origin': [], 'Protocol': [], 'RuleName': [],
'SchemaVersion': [], 'ServiceName': [], 'Filename': [], 'PackagePath': [], 'FileNameBuffer': [], 'UserName': [], 'ShareName': [], 'NewState': [], 'Param3': [], 'EventSourceName': [], 'NumberOfGroupPolicyObjects': [], 'ProcessingMode': [], 'ProcessingTimeInMilliseconds': [], 'HostName': [], 'Ipaddress': [], 'NewTime': [], 'OldTime': [], 'HiveName': [], 'ErrorDescription': [], 'Address': [], 'AddressLength': [], 'QueryName': [], 'TSId': [], 'UserSid': [], 'DeviceTime': [], 'DeviceVersionMajor': [], 'DeviceVersionMinor': [], 'FinalStatus': [], 'ImagePath': [], 'ServiceType': [], 'StartType': [], 'ExtensionId': [], 'ExtensionName': [], 'ShutdownActionType': [], 'ShutdownEventCode': [], 'ShutdownReason': [], 'Group': [], 'IdleStateCount': [], 'Number': [], 'BootMode': [], 'BuildVersion': [], 'MajorVersion': [], 'MinorVersion': [], 'QfeVersion': [], 'ServiceVersion': [], 'StartTime': [], 'StopTime': [], 'TimeSource': [], 'Targetname': [], 'Caption': [], 'ErrorMessage': [], 'RetryMinutes': [], 'Description': [], 'Type': [], 'OperationType': [], 'CommandLine': [], 'PackageName': [], 'Data': [], 'LogonId': [], 'ServerName': [], 'ObjectName': [], 'AccessList': [], 'AccessMask': [], 'HandleId': [], 'ObjectServer': [], 'ObjectType': [], 'SubjectDomainName': [], 'SubjectLogonId': [], 'SubjectUserName': [], 'SubjectUserSid': [], 'NewProcessId': [], 'NewProcessName': [], 'ParentProcessName': [], 'TargetDomainName': [], 'TargetLogonId': [], 'TargetUserName': [], 'TargetUserSid': [], 'TokenElevationType': [], 'NewValue': [], 'ObjectValueName': [], 'OldValue': [], 'Properties': [], 'PrivilegeList': [], 'Service': [], 'AuthenticationPackageName': [], 'ImpersonationLevel': [], 'IpPort': [], 'KeyLength': [], 'LmPackageName': [], 'LogonGuid': [], 'LogonProcessName': [], 'TransmittedServices': [], 'WorkstationName': [], 'CallerProcessName': [], 'TargetSid': [], 'TaskContentNew': [], 'AuditPolicyChanges': [], 'SourceProcessId': [], 'TargetProcessId': [], 'TransactionId': [], 'TargetInfo': [], 'TargetLogonGuid': [], 'TargetServerName': [], 'Details': [], 'PackageFullName': [], 'processPath': [], 'Provider_Name': [], 'Accesses': [], 'AccountDomain': [], 'AccountExpires': [], 'AddonName': [], 'AllowedToDelegateTo': [], 'Application': [], 'AttributeLDAPDisplayName': [], 'AttributeValue': [], 'AuditSourceName': [], 'CallingProcessName': [], 'CallTrace': [], 'Company': [], 'CreationUtcTime': [], 'CurrentDirectory': [], 'DestinationAddress': [], 'DestinationHostname': [], 'DestinationIp': [], 'DestinationIsIpv6': [], 'DestinationPort': [], 'DestinationPortName': [], 'DestPort': [], 'Detail': [], 'DetectionSource': [], 'DeviceClassName': [], 'DeviceDescription': [], 'DisplayName': [], 'EngineVersion': [], 'EventSourceId': [], 'ExtraInfo': [], 'FailureCode': [], 'FailureReason': [], 'FileVersion': [], 'FilterHostProcessID': [], 'GrantedAccess': [], 'GroupDomain': [], 'GroupName': [], 'GroupSid': [], 'Hash': [], 'Hashes': [], 'HomeDirectory': [], 'HomePath': [], 'HostApplication': [], 'HostVersion': [], 'Image': [], 'ImageLoaded': [], 'Initiated': [], 'IntegrityLevel': [], 'LayerRTID': [], 'LDAPDisplayName': [], 'LogonHours': [], 'NewName': [], 'NewThreadId': [], 'NewUacValue': [], 'NotificationPackageName': [], 'ObjectClass': [], 'OldUacValue': [], 'OriginalFileName': [], 'ParentCommandLine': [], 'ParentImage': [], 'ParentProcessGuid': [], 'ParentProcessId': [], 'PasswordLastSet': [], 'PerfStateCount': [], 'PipeName': [], 'PreviousTime': [], 'PrimaryGroupId': [], 'ProcessCommandLine': [], 'ProcessGuid': [], 'Product': [], 
'ProfilePath': [], 'ProtocolHostProcessID': [], 'PuaCount': [], 'PuaPolicyId': [], 'Publisher': [], 'QueryResults': [], 'QueryStatus': [], 'RelativeTargetName': [], 'ResourceManager': [], 'SAMAccountName': [], 'ScriptPath': [], 'SecurityPackageName': [], 'ServerID': [], 'ServerURL': [], 'ServicePrincipalNames': [], 'ShareLocalPath': [], 'SidHistory': [], 'Signature': [], 'SignatureStatus': [], 'Signed': [], 'SourceAddress': [], 'SourceHostname': [], 'SourceImage': [], 'SourceIp': [], 'SourceNetworkAddress': [], 'SourceIsIpv6': [], 'SourcePort': [], 'SourcePortName': [], 'SourceProcessGuid': [], 'StartAddress': [], 'StartFunction': [], 'StartModule': [], 'SubStatus': [], 'TargetFileName': [], 'TargetImage': [], 'TargetProcessAddress': [], 'TargetProcessGuid': [], 'TaskContent': [], 'TerminalSessionId': [], 'ThrottleStateCount': [], 'TicketEncryptionType': [], 'TicketOptions': [], 'UserAccountControl': [], 'UserParameters': [], 'UserPrincipalName': [], 'UserWorkstations': [], 'UtcTime': [], 'Workstation': [], 'ParentIntegrityLevel': [], 'ParentUser': []} - +# 映射字典,用于将事件数据的键名映射到数据库列名 mapping={'Original_Event_Log':['Original_Event_Log'],'TargetObject': ['Event_EventData_TargetObject'], 'Channel': ['Event_System_Channel', 'Event_RenderingInfo_Channel'], 'Computer': ['Event_System_Computer'], 'Correlation': ['Event_System_Correlation'], 'EventID': ['Event_System_EventID', 'Event_System_EventID_#text'], 'EventRecordID': ['Event_System_EventRecordID'], 'ProcessID': ['Event_EventData_ProcessID', 'Event_EventData_ProcessId', 'Event_System_Execution_#attributes_ProcessID', 'Event_UserData_Operation_StartedOperational_ProcessID', 'Event_UserData_DroppedLeakDiagnosisEventInfo_ProcessId', 'Event_UserData_CompatibilityFixEvent_ProcessId', 'Event_UserData_Operation_TemporaryEssStarted_Processid', 'Event_EventData_processId'], 'ThreadID': ['Event_System_Execution_#attributes_ThreadID'], 'Keywords': ['Event_System_Keywords'], 'Level': ['Event_System_Level', 'Event_RenderingInfo_Level'], 'Opcode': ['Event_System_Opcode', 'Event_RenderingInfo_Opcode'], 'Guid': ['Event_System_Provider_#attributes_Guid', 'Event_EventData_Guid'], 'Name': ['Event_EventData_name', 'Event_System_Provider_#attributes_Name', 'Event_EventData_#attributes_Name', 'Event_UserData_CertNotificationData_CertificateDetails_EKUs_EKU_#attributes_Name', 'Event_EventData_Name', 'Event_UserData_CertNotificationData_CertificateDetails_Template_#attributes_Name', 'Event_UserData_CertNotificationData_NewCertificateDetails_EKUs_EKU_#attributes_Name', 'Event_UserData_CertNotificationData_NewCertificateDetails_Template_#attributes_Name', 'Event_UserData_CertNotificationData_OldCertificateDetails_EKUs_EKU_#attributes_Name', 'Event_UserData_CertNotificationData_OldCertificateDetails_Template_#attributes_Name', 'Event_UserData_MemoryExhaustionInfo_NonPagedPoolInfo_Tag_1_Name', 'Event_UserData_MemoryExhaustionInfo_NonPagedPoolInfo_Tag_2_Name', 'Event_UserData_MemoryExhaustionInfo_NonPagedPoolInfo_Tag_3_Name', 'Event_UserData_MemoryExhaustionInfo_PagedPoolInfo_Tag_1_Name', 'Event_UserData_MemoryExhaustionInfo_PagedPoolInfo_Tag_2_Name', 'Event_UserData_MemoryExhaustionInfo_PagedPoolInfo_Tag_3_Name', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_1_Name', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_2_Name', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_3_Name', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_4_Name', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_5_Name', 
'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_6_Name', 'Event_UserData_EventData_Name'], 'UserID': ['Event_System_Security_#attributes_UserID', 'Event_EventData_UserId'], 'Task': ['Event_System_Task', 'Event_EventData_Task', 'Event_RenderingInfo_Task'], 'SystemTime': ['Event_System_TimeCreated_#attributes_SystemTime'], 'Version': ['Event_System_Version', 'Event_EventData_Version', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_1_Version', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_2_Version', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_3_Version', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_4_Version', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_5_Version', 'Event_UserData_MemoryExhaustionInfo_ProcessInfo_Process_6_Version'], 'Status': ['Event_UserData_ChangingDefaultPrinter_Status', 'Event_EventData_Status', 'Event_UserData_EventData_Status'], 'ActivityID': ['Event_System_Correlation_#attributes_ActivityID', 'Event_EventData_ActivityId'], 'Context': ['Event_UserData_LoadPluginFailed_Context', 'Event_UserData_CertNotificationData_#attributes_Context'], 'ErrorCode': ['Event_UserData_LoadPluginFailed_ErrorCode', 'Event_EventData_ErrorCode', 'Event_UserData_CbsUpdateChangeState_ErrorCode', 'Event_UserData_CbsPackageChangeState_ErrorCode', 'Event_ProcessingErrorData_ErrorCode', 'Event_EventData_errorCode'], 'AppId': ['Event_EventData_AppId', 'Event_EventData_AppID'], 'DCName': ['Event_EventData_DCName'], 'Binary': ['Event_EventData_Binary'], 'Qualifiers': ['Event_System_EventID_#attributes_Qualifiers'], 'Security': ['Event_System_Security'], 'Path': ['Event_EventData_Path'], 'ScriptBlockText': ['Event_EventData_ScriptBlockText'], 'param1': ['Event_EventData_param1', 'Event_UserData_EventXML_Param1', 'Event_EventData_Param1'], 'param2': ['Event_EventData_param2', 'Event_UserData_EventXML_Param2', 'Event_EventData_Param2'], 'ContextInfo': ['Event_EventData_ContextInfo'], 'Payload': ['Event_EventData_Payload'], 'UserData': ['Event_EventData_UserData'], 'State': ['Event_EventData_State'], 'EventType': ['Event_UserData_InvalidCommitLimitExhaustion_EventType'], 'AccountName': ['Event_UserData_CertNotificationData_#attributes_AccountName', 'Event_EventData_AccountName'], 'ProcessName': ['Event_UserData_CertNotificationData_#attributes_ProcessName', 'Event_EventData_ProcessName'], 'LogonType': ['Event_EventData_LogonType'], 'TaskName': ['Event_EventData_TaskName'], 'Message': ['Event_EventData_message', 'Event_RenderingInfo_Message', 'Event_EventData_Message'], 'Provider': ['Event_RenderingInfo_Provider'], 'updateGuid': ['Event_EventData_updateGuid'], 'updateRevisionNumber': ['Event_EventData_updateRevisionNumber'], 'updateTitle': ['Event_EventData_updateTitle', 'Event_EventData_UpdateTitle'], 'DeviceName': ['Event_EventData_DeviceName', 'Event_EventData_Prop_DeviceName'], 'DeviceNameLength': ['Event_EventData_DeviceNameLength'], 'ClientProcessId': ['Event_UserData_Operation_ClientFailure_ClientProcessId'], 'PossibleCause': ['Event_UserData_Operation_ClientFailure_PossibleCause', 'Event_UserData_Operation_TemporaryEssStarted_PossibleCause'], 'User': ['Event_UserData_Operation_ClientFailure_User', 'Event_UserData_Operation_TemporaryEssStarted_User', 'Event_EventData_User', 'Event_UserData_EventXML_User'], 'ProviderName': ['Event_UserData_Operation_StartedOperational_ProviderName'], 'Query': ['Event_UserData_Operation_TemporaryEssStarted_Query'], 'value': ['Event_EventData_value', 'Event_EventData_Value'], 'Action': 
['Event_EventData_Action', 'Event_UserData_CertNotificationData_Action'], 'ApplicationPath': ['Event_EventData_ApplicationPath'], 'ModifyingApplication': ['Event_EventData_ModifyingApplication'], 'Origin': ['Event_EventData_Origin'], 'Protocol': ['Event_EventData_Protocol', 'Event_EventData_protocol'], 'RuleName': ['Event_EventData_RuleName'], 'SchemaVersion': ['Event_EventData_SchemaVersion'], 'ServiceName': ['Event_EventData_ServiceName'], 'Filename': ['Event_EventData_Filename', 'Event_UserData_EventData_FileName', 'Event_EventData_FileName'], 'PackagePath': ['Event_EventData_PackagePath'], 'FileNameBuffer': ['Event_EventData_FileNameBuffer'], 'UserName': ['Event_UserData_EventData_UserName', 'Event_EventData_UserName', 'Event_EventData_userName', 'Event_EventData_Username'], 'ShareName': ['Event_UserData_EventData_ShareName', 'Event_EventData_ShareName'], 'NewState': ['Event_EventData_NewState'], 'Param3': ['Event_UserData_EventXML_Param3', 'Event_EventData_param3'], 'EventSourceName': ['Event_System_Provider_#attributes_EventSourceName'], 'NumberOfGroupPolicyObjects': ['Event_EventData_NumberOfGroupPolicyObjects'], 'ProcessingMode': ['Event_EventData_ProcessingMode'], 'ProcessingTimeInMilliseconds': ['Event_EventData_ProcessingTimeInMilliseconds'], 'HostName': ['Event_EventData_HostName'], 'Ipaddress': ['Event_EventData_Ipaddress', 'Event_EventData_IpAddress'], 'NewTime': ['Event_EventData_NewTime'], 'OldTime': ['Event_EventData_OldTime'], 'HiveName': ['Event_EventData_HiveName'], 'ErrorDescription': ['Event_EventData_ErrorDescription'], 'Address': ['Event_EventData_Address', 'Event_UserData_EventXML_Address'], 'AddressLength': ['Event_EventData_AddressLength'], 'QueryName': ['Event_EventData_QueryName'], 'TSId': ['Event_EventData_TSId'], 'UserSid': ['Event_EventData_UserSid', 'Event_UserData_EventXML_UserSid', 'Event_EventData_UserSID'], 'DeviceTime': ['Event_EventData_DeviceTime'], 'DeviceVersionMajor': ['Event_EventData_DeviceVersionMajor'], 'DeviceVersionMinor': ['Event_EventData_DeviceVersionMinor'], 'FinalStatus': ['Event_EventData_FinalStatus'], 'ImagePath': ['Event_EventData_ImagePath'], 'ServiceType': ['Event_EventData_ServiceType'], 'StartType': ['Event_EventData_StartType'], 'ExtensionId': ['Event_EventData_ExtensionId'], 'ExtensionName': ['Event_EventData_ExtensionName'], 'ShutdownActionType': ['Event_EventData_ShutdownActionType'], 'ShutdownEventCode': ['Event_EventData_ShutdownEventCode'], 'ShutdownReason': ['Event_EventData_ShutdownReason'], 'Group': ['Event_EventData_Group'], 'IdleStateCount': ['Event_EventData_IdleStateCount'], 'Number': ['Event_EventData_Number', 'Event_EventData_number'], 'BootMode': ['Event_EventData_BootMode'], 'BuildVersion': ['Event_EventData_BuildVersion'], 'MajorVersion': ['Event_EventData_MajorVersion'], 'MinorVersion': ['Event_EventData_MinorVersion'], 'QfeVersion': ['Event_EventData_QfeVersion'], 'ServiceVersion': ['Event_EventData_ServiceVersion'], 'StartTime': ['Event_EventData_StartTime', 'Event_UserData_CompatibilityFixEvent_StartTime'], 'StopTime': ['Event_EventData_StopTime'], 'TimeSource': ['Event_EventData_TimeSource'], 'Targetname': ['Event_EventData_Targetname'], 'Caption': ['Event_EventData_Caption'], 'ErrorMessage': ['Event_EventData_ErrorMessage'], 'RetryMinutes': ['Event_EventData_RetryMinutes'], 'Description': ['Event_EventData_Description'], 'Type': ['Event_EventData_Type'], 'OperationType': ['Event_EventData_OperationType'], 'CommandLine': ['Event_EventData_CommandLine'], 'PackageName': ['Event_EventData_PackageName'], 
'Data': ['Event_EventData_Data', 'Event_EventData_Data_#text'], 'LogonId': ['Event_EventData_LogonId'], 'ServerName': ['Event_EventData_ServerName', 'Event_EventData_serverName'], 'ObjectName': ['Event_EventData_ObjectName'], 'AccessList': ['Event_EventData_AccessList'], 'AccessMask': ['Event_EventData_AccessMask'], 'HandleId': ['Event_EventData_HandleId'], 'ObjectServer': ['Event_EventData_ObjectServer'], 'ObjectType': ['Event_EventData_ObjectType'], 'SubjectDomainName': ['Event_EventData_SubjectDomainName'], 'SubjectLogonId': ['Event_EventData_SubjectLogonId'], 'SubjectUserName': ['Event_EventData_SubjectUserName'], 'SubjectUserSid': ['Event_EventData_SubjectUserSid'], 'NewProcessId': ['Event_EventData_NewProcessId'], 'NewProcessName': ['Event_EventData_NewProcessName'], 'ParentProcessName': ['Event_EventData_ParentProcessName'], 'TargetDomainName': ['Event_EventData_TargetDomainName'], 'TargetLogonId': ['Event_EventData_TargetLogonId'], 'TargetUserName': ['Event_EventData_TargetUserName'], 'TargetUserSid': ['Event_EventData_TargetUserSid'], 'TokenElevationType': ['Event_EventData_TokenElevationType'], 'NewValue': ['Event_EventData_NewValue'], 'ObjectValueName': ['Event_EventData_ObjectValueName'], 'OldValue': ['Event_EventData_OldValue'], 'Properties': ['Event_EventData_Properties'], 'PrivilegeList': ['Event_EventData_PrivilegeList'], 'Service': ['Event_EventData_Service'], 'AuthenticationPackageName': ['Event_EventData_AuthenticationPackageName'], 'ImpersonationLevel': ['Event_EventData_ImpersonationLevel'], 'IpPort': ['Event_EventData_IpPort'], 'KeyLength': ['Event_EventData_KeyLength'], 'LmPackageName': ['Event_EventData_LmPackageName'], 'LogonGuid': ['Event_EventData_LogonGuid'], 'LogonProcessName': ['Event_EventData_LogonProcessName'], 'TransmittedServices': ['Event_EventData_TransmittedServices'], 'WorkstationName': ['Event_EventData_WorkstationName'], 'CallerProcessName': ['Event_EventData_CallerProcessName'], 'TargetSid': ['Event_EventData_TargetSid'], 'TaskContentNew': ['Event_EventData_TaskContentNew'], 'AuditPolicyChanges': ['Event_EventData_AuditPolicyChanges'], 'SourceProcessId': ['Event_EventData_SourceProcessId'], 'TargetProcessId': ['Event_EventData_TargetProcessId'], 'TransactionId': ['Event_EventData_TransactionId'], 'TargetInfo': ['Event_EventData_TargetInfo'], 'TargetLogonGuid': ['Event_EventData_TargetLogonGuid'], 'TargetServerName': ['Event_EventData_TargetServerName'], 'Details': ['Event_EventData_Details'], 'PackageFullName': ['Event_EventData_PackageFullName'], 'processPath': ['Event_EventData_processPath'], 'Provider_Name': ['Event_System_Provider_#attributes_Name'], 'Accesses': ['Event_EventData_Accesses'], 'AccountDomain': ['Event_EventData_AccountDomain'], 'AccountExpires': ['Event_EventData_AccountExpires'], 'AddonName': ['Event_EventData_AddonName'], 'AllowedToDelegateTo': ['Event_EventData_AllowedToDelegateTo'], 'Application': ['Event_EventData_Application'], 'AttributeLDAPDisplayName': ['Event_EventData_AttributeLDAPDisplayName'], 'AttributeValue': ['Event_EventData_AttributeValue'], 'AuditSourceName': ['Event_EventData_AuditSourceName'], 'CallingProcessName': ['Event_EventData_CallingProcessName'], 'CallTrace': ['Event_EventData_CallTrace'], 'Company': ['Event_EventData_Company'], 'CreationUtcTime': ['Event_EventData_CreationUtcTime'], 'CurrentDirectory': ['Event_EventData_CurrentDirectory'], 'DestinationAddress': ['Event_EventData_DestinationAddress'], 'DestinationHostname': ['Event_EventData_DestinationHostname'], 'DestinationIp': 
['Event_EventData_DestinationIp'], 'DestinationIsIpv6': ['Event_EventData_DestinationIsIpv6'], 'DestinationPort': ['Event_EventData_DestinationPort'], 'DestinationPortName': ['Event_EventData_DestinationPortName'], 'DestPort': ['Event_EventData_DestPort'], 'Detail': ['Event_EventData_Detail'], 'DetectionSource': ['Event_EventData_DetectionSource'], 'DeviceClassName': ['Event_EventData_DeviceClassName'], 'DeviceDescription': ['Event_EventData_DeviceDescription'], 'DisplayName': ['Event_EventData_DisplayName'], 'EngineVersion': ['Event_EventData_EngineVersion'], 'EventSourceId': ['Event_EventData_EventSourceId'], 'ExtraInfo': ['Event_EventData_ExtraInfo'], 'FailureCode': ['Event_EventData_FailureCode'], 'FailureReason': ['Event_EventData_FailureReason'], 'FileVersion': ['Event_EventData_FileVersion'], 'FilterHostProcessID': ['Event_EventData_FilterHostProcessID'], 'GrantedAccess': ['Event_EventData_GrantedAccess'], 'GroupDomain': ['Event_EventData_GroupDomain'], 'GroupName': ['Event_EventData_GroupName'], 'GroupSid': ['Event_EventData_GroupSid'], 'Hash': ['Event_EventData_Hash'], 'Hashes': ['Event_EventData_Hashes'], 'HomeDirectory': ['Event_EventData_HomeDirectory'], 'HomePath': ['Event_EventData_HomePath'], 'HostApplication': ['Event_EventData_HostApplication'], 'HostVersion': ['Event_EventData_HostVersion'], 'Image': ['Event_EventData_Image'], 'ImageLoaded': ['Event_EventData_ImageLoaded'], 'Initiated': ['Event_EventData_Initiated'], 'IntegrityLevel': ['Event_EventData_IntegrityLevel'], 'LayerRTID': ['Event_EventData_LayerRTID'], 'LDAPDisplayName': ['Event_EventData_LDAPDisplayName'], 'LogonHours': ['Event_EventData_LogonHours'], 'NewName': ['Event_EventData_NewName'], 'NewThreadId': ['Event_EventData_NewThreadId'], 'NewUacValue': ['Event_EventData_NewUacValue'], 'NotificationPackageName': ['Event_EventData_NotificationPackageName'], 'ObjectClass': ['Event_EventData_ObjectClass'], 'OldUacValue': ['Event_EventData_OldUacValue'], 'OriginalFileName': ['Event_EventData_OriginalFileName'], 'ParentCommandLine': ['Event_EventData_ParentCommandLine'], 'ParentImage': ['Event_EventData_ParentImage'], 'ParentProcessGuid': ['Event_EventData_ParentProcessGuid'], 'ParentProcessId': ['Event_EventData_ParentProcessId'], 'PasswordLastSet': ['Event_EventData_PasswordLastSet'], 'PerfStateCount': ['Event_EventData_PerfStateCount'], 'PipeName': ['Event_EventData_PipeName'], 'PreviousTime': ['Event_EventData_PreviousTime'], 'PrimaryGroupId': ['Event_EventData_PrimaryGroupId'], 'ProcessCommandLine': ['Event_EventData_ProcessCommandLine'], 'ProcessGuid': ['Event_EventData_ProcessGuid'], 'Product': ['Event_EventData_Product'], 'ProfilePath': ['Event_EventData_ProfilePath'], 'ProtocolHostProcessID': ['Event_EventData_ProtocolHostProcessID'], 'PuaCount': ['Event_EventData_PuaCount'], 'PuaPolicyId': ['Event_EventData_PuaPolicyId'], 'Publisher': ['Event_EventData_Publisher'], 'QueryResults': ['Event_EventData_QueryResults'], 'QueryStatus': ['Event_EventData_QueryStatus'], 'RelativeTargetName': ['Event_EventData_RelativeTargetName'], 'ResourceManager': ['Event_EventData_ResourceManager'], 'SAMAccountName': ['Event_EventData_SamAccountName'], 'ScriptPath': ['Event_EventData_ScriptPath'], 'SecurityPackageName': ['Event_EventData_SecurityPackageName'], 'ServerID': ['Event_EventData_ServerID'], 'ServerURL': ['Event_EventData_ServerURL'], 'ServicePrincipalNames': ['Event_EventData_ServicePrincipalNames'], 'ShareLocalPath': ['Event_EventData_ShareLocalPath'], 'SidHistory': ['Event_EventData_SidHistory'], 'Signature': 
['Event_EventData_Signature'], 'SignatureStatus': ['Event_EventData_SignatureStatus'], 'Signed': ['Event_EventData_Signed'], 'SourceAddress': ['Event_EventData_SourceAddress'], 'SourceHostname': ['Event_EventData_SourceHostname'], 'SourceImage': ['Event_EventData_SourceImage'], 'SourceIp': ['Event_EventData_SourceIp'], 'SourceNetworkAddress': ['Event_EventData_SourceNetworkAddress'], 'SourceIsIpv6': ['Event_EventData_SourceIsIpv6'], 'SourcePort': ['Event_EventData_SourcePort'], 'SourcePortName': ['Event_EventData_SourcePortName'], 'SourceProcessGuid': ['Event_EventData_SourceProcessGuid'], 'StartAddress': ['Event_EventData_StartAddress'], 'StartFunction': ['Event_EventData_StartFunction'], 'StartModule': ['Event_EventData_StartModule'], 'SubStatus': ['Event_EventData_SubStatus'], 'TargetFileName': ['Event_EventData_TargetFilename'], 'TargetImage': ['Event_EventData_TargetImage'], 'TargetProcessAddress': ['Event_EventData_TargetProcessAddress'], 'TargetProcessGuid': ['Event_EventData_TargetProcessGuid'], 'TaskContent': ['Event_EventData_TaskContent'], 'TerminalSessionId': ['Event_EventData_TerminalSessionId'], 'ThrottleStateCount': ['Event_EventData_ThrottleStateCount'], 'TicketEncryptionType': ['Event_EventData_TicketEncryptionType'], 'TicketOptions': ['Event_EventData_TicketOptions'], 'UserAccountControl': ['Event_EventData_UserAccountControl'], 'UserParameters': ['Event_EventData_UserParameters'], 'UserPrincipalName': ['Event_EventData_UserPrincipalName'], 'UserWorkstations': ['Event_EventData_UserWorkstations'], 'UtcTime': ['Event_EventData_UtcTime'], 'Workstation': ['Event_EventData_Workstation'], 'ParentIntegrityLevel': ['Event_EventData_ParentIntegrityLevel'], 'ParentUser': ['Event_EventData_ParentUser']}
-
+# Create a multiprocessing lock so that database inserts stay thread-safe
 l = multiprocessing.Lock()
-
-
-
-
 included={}
 DB=""
 DBconn=""
 
 def search_db(query,DB):
+    """
+    Execute a SQL query against the database and return the results.
+
+    :param query: SQL query string
+    :param DB: path of the database file
+    :return: list of result rows
+    """
     # Connect to the database
     # conn = sqlite3.connect(DB)
     # cursor = conn.cursor()
@@ -33,10 +37,8 @@ def search_db(query,DB):
     #query="SELECT Original_Event_Log FROM Events WHERE ImageLoaded LIKE '%\\\\Temp\\\\%' ESCAPE '\\'"
     #query="SELECT ImageLoaded FROM AllEvents GROUP BY ImageLoaded"
     #name = 'John Doe'
-    # Execute the query
     try:
-
         cursor.execute(query.replace("Imphash","Hashes").replace("sha1","Hashes").replace("md5","Hashes").replace("sha256","Hashes").replace("*","Original_Event_Log,SystemTime"))
     except Exception as e:
         #print(f"Error {str(e)} with query : \n"+query)
@@ -47,12 +49,15 @@ def search_db(query,DB):
     # Print the results
     #for row in results:
     #    print(row)
-    # Close the connection
-
     return results
-
 def optimised_search(DB,output=""):
+    """
+    Run the rule queries against the database and build the detection report.
+
+    :param DB: path of the database file
+    :param output: prefix of the output files
+    """
     global DBconn
     # DB = DB
     # conn = sqlite3.connect(DB)
@@ -68,6 +73,7 @@ def optimised_search(DB,output=""):
     #tic = time.time()
     Detections = {'DateTime' : [],'title': [], 'description': [], 'Original_Event_Log': [], 'status': [], 'author': [], 'tags': [], 'falsepositives': [], 'level': [], 'rule': [], 'id': [], 'filename': []}
+    # Iterate over all rules, run each query and collect the results
     for usecase in rules:
         query = usecase["rule"]
         detected_events=search_db(query, DB)
@@ -97,6 +103,7 @@ def optimised_search(DB,output=""):
     grouped = Report['title'].value_counts()
     cursor = DBconn.cursor()
+    # Write the Excel report
     writer = pd.ExcelWriter(output+'_'+'Detections.xlsx', engine='xlsxwriter', options={'encoding': 'utf-8'})
     grouped.to_excel(writer, sheet_name='Result Summary')
     Report.to_excel(writer, sheet_name='Detailed Report', index=False)
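Because the query rewriting inside search_db() is easy to misread, here is a standalone sketch of what the chained replace() calls do to a typical Sigma-generated query; the input query string is hypothetical.

# Sketch (not part of the patch) of the field/column rewriting performed in search_db()
sigma_sql = "SELECT * FROM Events WHERE Imphash = '17B461A082950FC6332228572138B80C'"

rewritten = (sigma_sql.replace("Imphash", "Hashes")
                      .replace("sha1", "Hashes")
                      .replace("md5", "Hashes")
                      .replace("sha256", "Hashes")
                      .replace("*", "Original_Event_Log,SystemTime"))
print(rewritten)
# SELECT Original_Event_Log,SystemTime FROM Events WHERE Hashes = '17B461A082950FC6332228572138B80C'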
@@ -493,71 +500,115 @@ def optimised_parse_mp(file):
                 'ParentUser': ['Event_EventData_ParentUser']}
     parser = PyEvtxParser(str(file))
-    for record in parser.records_json():
-
-        data=flatten(json.loads(record["data"]))
-        for key in mapping.keys():
-            requiredfield = "None"
-            for field in mapping[key]:
-                if field in data:
-                    requiredfield=field
-                    break
-
-            if requiredfield!="None":
-                if isinstance(data[requiredfield], list):
-                    Alldata[key].append(",".join(data[requiredfield]))
-                else:
-                    Alldata[key].append(str(data[requiredfield]))
-            else:
-                if field == "Original_Event_Log":
-                    Alldata[key].append(record["data"])
-                    #Alldata[key].append(None)
-                else:
-
-                    Alldata[key].append(None)
+    for record in parser.records_json():
+        # Flatten the JSON event data to simplify further processing
+        data = flatten(json.loads(record["data"]))
+
+        for key in mapping.keys():
+            requiredfield = "None"
+            # Walk the candidate fields in mapping and take the first one present in the data
+            for field in mapping[key]:
+                if field in data:
+                    requiredfield = field
+                    break
+
+            if requiredfield != "None":
+                # If the field value is a list, join its items with commas before storing it
+                if isinstance(data[requiredfield], list):
+                    Alldata[key].append(",".join(data[requiredfield]))
+                else:
+                    # Otherwise store the value as a string
+                    Alldata[key].append(str(data[requiredfield]))
+            else:
+                # No matching field was found
+                if field == "Original_Event_Log":
+                    # For the original event log column, store the whole event record
+                    Alldata[key].append(record["data"])
+                else:
+                    # For any other missing field, store None
+                    Alldata[key].append(None)
     #print("finished Parsing")
     #print(Alldata)
+
+    # Use the lock so the insert is safe across worker processes
     l.acquire()
     #print("Inserting data into "+DB)
     insert_into_db_mp(Alldata, DB)
     l.release()
-    print("Done Parsing : "+str(file))
-
+    print("Done Parsing : " + str(file))
 def clean(DBName):
-    # specify the path to the file to be removed
+    """
+    Remove the temporary SQLite database file.
+
+    Parameters:
+    - DBName: SQLite database file name
+    """
     file_path = DBName
 
-    # check if the file exists
+    # Check whether the file exists
     if os.path.isfile(file_path):
-        # remove the file
+        # Delete the file
         os.remove(file_path)
         print(f"Temp Database has been removed.")
     else:
         print(f"Temp Database does not exist.")
-
 def init(l):
+    """
+    Initialise the global lock for a worker process.
+
+    Parameters:
+    - l: the lock object
+    """
     global lock
     lock = l
-
-def Sigma_Analyze(Path, rules,output, DBName="Events.sqlite"):
-    global l,DBconn,DB
+def Sigma_Analyze(Path, rules, output, DBName="Events.sqlite"):
+    """
+    Analyze Windows event logs with Sigma rules.
+
+    Parameters:
+    - Path: path of the event log files
+    - rules: path of the Sigma rules file
+    - output: output file name prefix
+    - DBName: SQLite database file name
+    """
+    global l, DBconn, DB
     tic_start = time.time()
-    DB=DBName
+    DB = DBName
+    # Create the SQLite database
     Create_DB(DB)
     print("Analyzing logs using Sigma with below config : ")
     print(f"Logs Path : {Path}\nSigma Rules file : {rules}\nProfile : {output}")
+
+    # Use multiprocessing to speed up parsing
     pool = multiprocessing.Pool(multiprocessing.cpu_count(), initializer=init, initargs=(l,))
+
+    # Auto-detect the log files
     files = auto_detect(Path)
+
+    # Parse the log files in parallel
     results = pool.map(optimised_parse_mp, files)
+
+    # Insert the Sigma rules into the database
     RulesToDB(rules, DB)
+
+    # Connect to the database
     DBconn = sqlite3.connect(DB)
-    optimised_search(DB,output)
+
+    # Run the optimised search that applies the Sigma rules
+    optimised_search(DB, output)
+
+    # Clean up the temporary database
     clean(DBName)
+
+    # Close the database connection
     DBconn.close()
+
     toc_end = time.time()
-    print("Analysis results availble as CSV file with Name "+output+'_'+'Detections.csv')
-    print("Analysis results availble as Excel file with statistics as "+output+'_'+'Detections.xlsx')
+    # Print the names of the output files
+    print("Analysis results available as CSV file with Name " + output + '_' + 'Detections.csv')
+    print("Analysis results available as Excel file with statistics as " + output + '_' + 'Detections.xlsx')
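Finally, a hypothetical end-to-end run of the refactored Sigma_Analyze, for reviewers validating the multiprocessing path; the log folder and rules file below are placeholders, and the module is assumed to be importable.

# Hypothetical driver for Sigma_Analyze (not part of the patch)
from SigmaHunter import Sigma_Analyze  # assumes src/lib is on sys.path

if __name__ == "__main__":  # guard required because Sigma_Analyze spawns a multiprocessing Pool
    Sigma_Analyze(Path="./logs",              # placeholder folder of EVTX files passed to auto_detect
                  rules="sigma_rules.json",   # placeholder rules file consumed by RulesToDB
                  output="case01")            # produces case01_Detections.csv and case01_Detections.xlsx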