Added the report

main
lanmory 1 month ago
parent 444d0f2300
commit 07a0f43de5

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

@ -1,15 +1,15 @@
#!/bin/bash
# Convert a directory of Sigma rules into APT-Hunter's rules.json format.
#
# Usage: ./<script> <rules-path>
# Output: rules.json in the current directory.
#
# Fail fast if any step (clone, conversion) errors out.
set -e

if [ "$#" -ne 1 ]; then
  # Diagnostics belong on stderr.
  echo "Please enter rules path as argument " >&2
  exit 1
fi

echo "Getting Sigma Converter Tool"
git clone https://github.com/SigmaHQ/legacy-sigmatools.git

echo "Converting sigma rules "
# Quote "$1" so rule directories containing spaces work.
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d "$1" -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status
echo "Rules created with file name : rules.json "

@ -1,11 +1,11 @@
#!/bin/bash #!/bin/bash
echo "Getting Sigma Converter Toot" echo "Getting Sigma Converter Toot"
git clone https://github.com/SigmaHQ/legacy-sigmatools.git git clone https://github.com/SigmaHQ/legacy-sigmatools.git
echo "Getting Sigma Rules" echo "Getting Sigma Rules"
git clone https://github.com/SigmaHQ/sigma.git git clone https://github.com/SigmaHQ/sigma.git
echo "Converting sigma rules " echo "Converting sigma rules "
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d sigma/rules/windows/ -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d sigma/rules/windows/ -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status
echo "Rules created with file name : rules.json " echo "Rules created with file name : rules.json "

File diff suppressed because it is too large Load Diff

@ -1,99 +1,99 @@
[ [
{ {
"name": "Suspicious User Agent", "name": "Suspicious User Agent",
"severity": "High", "severity": "High",
"query": "SELECT * FROM events WHERE UserAgent LIKE '%python%' OR UserAgent LIKE '%ruler%' OR UserAgent LIKE '%curl%' OR UserAgent LIKE '%Wget%' OR UserAgent LIKE '%python-requests%' OR UserAgent LIKE '%AADInternals%' OR UserAgent LIKE '%azurehound%' OR UserAgent LIKE '%axios%' OR UserAgent LIKE '%BAV2ROPC%' " "query": "SELECT * FROM events WHERE UserAgent LIKE '%python%' OR UserAgent LIKE '%ruler%' OR UserAgent LIKE '%curl%' OR UserAgent LIKE '%Wget%' OR UserAgent LIKE '%python-requests%' OR UserAgent LIKE '%AADInternals%' OR UserAgent LIKE '%azurehound%' OR UserAgent LIKE '%axios%' OR UserAgent LIKE '%BAV2ROPC%' "
}, },
{ {
"name": "User adding or removing Inbox Rule", "name": "User adding or removing Inbox Rule",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%InboxRule%' OR Operation LIKE 'Set-Mailbox' OR Operation LIKE '%DeliverToMailboxAndForward%' OR Operation LIKE '%ForwardingAddress%' OR Operation LIKE '%ForwardingAddress%' " "query": "SELECT * FROM events WHERE Operation LIKE '%InboxRule%' OR Operation LIKE 'Set-Mailbox' OR Operation LIKE '%DeliverToMailboxAndForward%' OR Operation LIKE '%ForwardingAddress%' OR Operation LIKE '%ForwardingAddress%' "
}, },
{ {
"name": "After Hours Activity", "name": "After Hours Activity",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE (CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END >= 20 OR CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END < 6) AND NOT (Operation LIKE 'File%' OR Operation LIKE 'List%' OR Operation LIKE 'Page%' OR Operation LIKE '%UserLogin%');" "query": "SELECT * FROM events WHERE (CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END >= 20 OR CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END < 6) AND NOT (Operation LIKE 'File%' OR Operation LIKE 'List%' OR Operation LIKE 'Page%' OR Operation LIKE '%UserLogin%');"
}, },
{ {
"name": "Possible file exfiltration", "name": "Possible file exfiltration",
"severity": "Low", "severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%FileUploaded%' " "query": "SELECT * FROM events WHERE Operation LIKE '%FileUploaded%' "
}, },
{ {
"name": "Admin searching in emails of other users", "name": "Admin searching in emails of other users",
"severity": "Low", "severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%SearchStarted%' OR Operation LIKE '%SearchExportDownloaded%' OR Operation LIKE '%ViewedSearchExported%' " "query": "SELECT * FROM events WHERE Operation LIKE '%SearchStarted%' OR Operation LIKE '%SearchExportDownloaded%' OR Operation LIKE '%ViewedSearchExported%' "
}, },
{ {
"name": "Strong Authentication Disabled", "name": "Strong Authentication Disabled",
"severity": "medium", "severity": "medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%disable strong authentication%'" "query": "SELECT * FROM events WHERE Operation LIKE '%disable strong authentication%'"
}, },
{ {
"name": "User added to admin group", "name": "User added to admin group",
"severity": "High", "severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add member to group%' AND ModifiedProperties Like '%admin%') OR ( Operation LIKE '%AddedToGroup%' AND TargetUserOrGroupName Like '%admin%') " "query": "SELECT * FROM events WHERE ( Operation LIKE '%add member to group%' AND ModifiedProperties Like '%admin%') OR ( Operation LIKE '%AddedToGroup%' AND TargetUserOrGroupName Like '%admin%') "
}, },
{ {
"name": "New Policy created", "name": "New Policy created",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add policy%' ) " "query": "SELECT * FROM events WHERE ( Operation LIKE '%add policy%' ) "
}, },
{ {
"name": "Security Alert triggered", "name": "Security Alert triggered",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AlertTriggered%' AND NOT Severity Like '%Low%') " "query": "SELECT * FROM events WHERE ( Operation LIKE '%AlertTriggered%' AND NOT Severity Like '%Low%') "
}, },
{ {
"name": "Transport rules ( mail flow rules ) modified", "name": "Transport rules ( mail flow rules ) modified",
"severity": "High", "severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%TransportRule%') " "query": "SELECT * FROM events WHERE ( Operation LIKE '%TransportRule%') "
}, },
{ {
"name": "An application was registered in Azure AD", "name": "An application was registered in Azure AD",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add service principal.%') " "query": "SELECT * FROM events WHERE ( Operation LIKE '%Add service principal.%') "
}, },
{ {
"name": "Add app role assignment grant to user", "name": "Add app role assignment grant to user",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add app role assignment grant to user.%') " "query": "SELECT * FROM events WHERE ( Operation LIKE '%Add app role assignment grant to user.%') "
}, },
{ {
"name": "eDiscovery Abuse", "name": "eDiscovery Abuse",
"severity": "High", "severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%New-ComplianceSearch%') " "query": "SELECT * FROM events WHERE ( Operation LIKE '%New-ComplianceSearch%') "
}, },
{ {
"name": "Operations affecting OAuth Applications", "name": "Operations affecting OAuth Applications",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Add application.' OR Operation = 'Update application' OR Operation = 'Add service principal.' OR Operation = 'Update application Certificates and secrets management' OR Operation = 'Update applicationUpdate service principal.' OR Operation = 'Add app role assignment grant to user.' OR Operation = 'Add delegated permission grant.' OR Operation = 'Add owner to application.' OR Operation = 'Add owner to service principal.') " "query": "SELECT * FROM events WHERE ( Operation = 'Add application.' OR Operation = 'Update application' OR Operation = 'Add service principal.' OR Operation = 'Update application Certificates and secrets management' OR Operation = 'Update applicationUpdate service principal.' OR Operation = 'Add app role assignment grant to user.' OR Operation = 'Add delegated permission grant.' OR Operation = 'Add owner to application.' OR Operation = 'Add owner to service principal.') "
}, },
{ {
"name": "Suspicious Operations affecting Mailbox ", "name": "Suspicious Operations affecting Mailbox ",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Set-MailboxJunkEmailConfiguration' OR Operation = 'SoftDelete' OR Operation = 'SendAs' OR Operation = 'HardDelete' OR Operation = 'MoveToDeletedItems' ) " "query": "SELECT * FROM events WHERE ( Operation = 'Set-MailboxJunkEmailConfiguration' OR Operation = 'SoftDelete' OR Operation = 'SendAs' OR Operation = 'HardDelete' OR Operation = 'MoveToDeletedItems' ) "
}, },
{ {
"name": "Suspicious Operations affecting SharePoint ", "name": "Suspicious Operations affecting SharePoint ",
"severity": "Medium", "severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'AddedToSecureLink' OR Operation = 'SearchQueryPerformed' OR Operation = 'SecureLinkCreated' OR Operation = 'SecureLinkUpdated' OR Operation = 'SharingInvitationCreated' ) " "query": "SELECT * FROM events WHERE ( Operation = 'AddedToSecureLink' OR Operation = 'SearchQueryPerformed' OR Operation = 'SecureLinkCreated' OR Operation = 'SecureLinkUpdated' OR Operation = 'SharingInvitationCreated' ) "
}, },
{ {
"name": "User Modifying RetentionPolicy ", "name": "User Modifying RetentionPolicy ",
"severity": "High", "severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%UnifiedAuditLogRetentionPolicy%' ) " "query": "SELECT * FROM events WHERE ( Operation LIKE '%UnifiedAuditLogRetentionPolicy%' ) "
}, },
{ {
"name": "User Modifying Audit Logging ", "name": "User Modifying Audit Logging ",
"severity": "High", "severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AdminAuditLogConfig%' ) " "query": "SELECT * FROM events WHERE ( Operation LIKE '%AdminAuditLogConfig%' ) "
}, },
{ {
"name": "String Authentication Disabled ", "name": "String Authentication Disabled ",
"severity": "High", "severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Disable Strong Authentication.%' ) " "query": "SELECT * FROM events WHERE ( Operation LIKE '%Disable Strong Authentication.%' ) "
} }
] ]

@ -1,99 +1,99 @@
<p align="center"> <p align="center">
<a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/v/release/ahmedkhlief/APT-Hunter?color=blue&label=Stable%20Version&style=flat""/></a> <a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/v/release/ahmedkhlief/APT-Hunter?color=blue&label=Stable%20Version&style=flat""/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/downloads/ahmedkhlief/APT-Hunter/total?style=flat&label=GitHub Downloads&color=blue"/></a> <a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/downloads/ahmedkhlief/APT-Hunter/total?style=flat&label=GitHub Downloads&color=blue"/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/stargazers"><img src="https://img.shields.io/github/stars/ahmedkhlief/APT-Hunter?style=flat&label=GitHub Stars"/></a> <a href="https://github.com/ahmedkhlief/APT-Hunter/stargazers"><img src="https://img.shields.io/github/stars/ahmedkhlief/APT-Hunter?style=flat&label=GitHub Stars"/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/graphs/contributors"><img src="https://img.shields.io/github/contributors/ahmedkhlief/APT-Hunter?label=Contributors&color=blue&style=flat"/></a> <a href="https://github.com/ahmedkhlief/APT-Hunter/graphs/contributors"><img src="https://img.shields.io/github/contributors/ahmedkhlief/APT-Hunter?label=Contributors&color=blue&style=flat"/></a>
</p> </p>
# APT-Hunter # APT-Hunter
APT-Hunter is Threat Hunting tool for windows event logs which made by purple team mindset to detect APT movements hidden in the sea of windows event logs to decrease the time to uncover suspicious activity . APT-Hunter use pre-defined detection rules and focus on statistics to uncover abnormalities which is very effective in compromise assessment . the output produced with timeline that can be analyzed directly from Excel , Timeline Explorer , Timesketch , etc... APT-Hunter is Threat Hunting tool for windows event logs which made by purple team mindset to detect APT movements hidden in the sea of windows event logs to decrease the time to uncover suspicious activity . APT-Hunter use pre-defined detection rules and focus on statistics to uncover abnormalities which is very effective in compromise assessment . the output produced with timeline that can be analyzed directly from Excel , Timeline Explorer , Timesketch , etc...
Full information about the tool and how its used in this article : [introducing-apt-hunter-threat-hunting-tool-using-windows-event-log](https://shells.systems/introducing-apt-hunter-threat-hunting-tool-via-windows-event-log/) Full information about the tool and how its used in this article : [introducing-apt-hunter-threat-hunting-tool-using-windows-event-log](https://shells.systems/introducing-apt-hunter-threat-hunting-tool-via-windows-event-log/)
New Release Info : [APT-HUNTER V3.0 : Rebuilt with Multiprocessing and new cool features](https://shells.systems/apt-hunter-v3-0-rebuilt-with-multiprocessing-and-new-cool-features/) New Release Info : [APT-HUNTER V3.0 : Rebuilt with Multiprocessing and new cool features](https://shells.systems/apt-hunter-v3-0-rebuilt-with-multiprocessing-and-new-cool-features/)
# Author # Author
Twitter : [@ahmed_khlief](https://twitter.com/ahmed_khlief) Twitter : [@ahmed_khlief](https://twitter.com/ahmed_khlief)
Linkedin : [Ahmed Khlief](https://www.linkedin.com/in/ahmed-khlief-499321a7) Linkedin : [Ahmed Khlief](https://www.linkedin.com/in/ahmed-khlief-499321a7)
# Download APT-Hunter : # Download APT-Hunter :
Download the latest stable version of APT-Hunter with compiled binaries from [Releases](https://github.com/ahmedkhlief/APT-Hunter/releases) page. Download the latest stable version of APT-Hunter with compiled binaries from [Releases](https://github.com/ahmedkhlief/APT-Hunter/releases) page.
# How to Use APT-Hunter # How to Use APT-Hunter
APT-Hunter built using python3 so in order to use the tool you need to install the required libraries. APT-Hunter built using python3 so in order to use the tool you need to install the required libraries.
`python3 -m pip install -r requirements.txt` `python3 -m pip install -r requirements.txt`
APT-Hunter is easy to use you just use the argument -h to print help to see the options needed . APT-Hunter is easy to use you just use the argument -h to print help to see the options needed .
` python3 APT-Hunter.py -h` ` python3 APT-Hunter.py -h`
![APT-Hunter Help](screenshots/APTHunter-Help.png) ![APT-Hunter Help](screenshots/APTHunter-Help.png)
![APT-Hunter Analyzing with all report ](screenshots/APTHunter-Allreport.png) ![APT-Hunter Analyzing with all report ](screenshots/APTHunter-Allreport.png)
![APT-Hunter commandline output ](screenshots/APTHunter-output.png) ![APT-Hunter commandline output ](screenshots/APTHunter-output.png)
![APT-Hunter Excel Output ](screenshots/APTHunter-Excel.png) ![APT-Hunter Excel Output ](screenshots/APTHunter-Excel.png)
![APT-Hunter CSV Output with Time Sketch](screenshots/APTHunter-Timeline-Explorer.png) ![APT-Hunter CSV Output with Time Sketch](screenshots/APTHunter-Timeline-Explorer.png)
# Examples : # Examples :
Analyzing EVTX files , you can provide directory containing the logs or single file , APT hunter will detect the type of logs . Analyzing EVTX files , you can provide directory containing the logs or single file , APT hunter will detect the type of logs .
`python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport` `python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport`
Adding time frame to focus on specific timeline : Adding time frame to focus on specific timeline :
`python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport -start 2022-04-03 -end 2022-04-05T20:56` `python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport -start 2022-04-03 -end 2022-04-05T20:56`
Hunting using String or regex : Hunting using String or regex :
`python3 APT-Hunter.py -hunt "psexec" -p /opt/wineventlogs/ -o Project2` `python3 APT-Hunter.py -hunt "psexec" -p /opt/wineventlogs/ -o Project2`
`python3 APT-Hunter.py -huntfile "(psexec|psexesvc)" -p /opt/wineventlogs/ -o Project2` `python3 APT-Hunter.py -huntfile "(psexec|psexesvc)" -p /opt/wineventlogs/ -o Project2`
hunting using file that contain list of regex : hunting using file that contain list of regex :
`python3 APT-Hunter.py -huntfile "huntfile.txt" -p /opt/wineventlogs/ -o Project2` `python3 APT-Hunter.py -huntfile "huntfile.txt" -p /opt/wineventlogs/ -o Project2`
Hunting using sigma rules : Hunting using sigma rules :
`python3 APT-Hunter.py -sigma -rules rules.json -p /opt/wineventlogs/ -o Project2` `python3 APT-Hunter.py -sigma -rules rules.json -p /opt/wineventlogs/ -o Project2`
Getting Latest sigma rules converted for APT-Hunter ( output will be a file with name rules.json that contain the rules from Sigma repository [Sigma](https://github.com/SigmaHQ/sigma) ): Getting Latest sigma rules converted for APT-Hunter ( output will be a file with name rules.json that contain the rules from Sigma repository [Sigma](https://github.com/SigmaHQ/sigma) ):
Get_Latest_Sigma_Rules.sh Get_Latest_Sigma_Rules.sh
# Output Samples # Output Samples
![APT-Hunter CSV Output](Samples/Sample_TimeSketch.csv) : This CSV file you can upload it to timesketch in order to have timeline analysis that will help you see the full picture of the attack . ![APT-Hunter CSV Output](Samples/Sample_TimeSketch.csv) : This CSV file you can upload it to timesketch in order to have timeline analysis that will help you see the full picture of the attack .
![APT-Hunter Excel Output](Samples/Sample_Report.xlsx) : this excel sheet will include all the events detected from every windows logs provided to APT-Hunter. ![APT-Hunter Excel Output](Samples/Sample_Report.xlsx) : this excel sheet will include all the events detected from every windows logs provided to APT-Hunter.
![APT-Hunter Success and Failed logon Report ](Samples/Sample_Logon_Events.csv) : All logon events with parsed fields (Date, User , Source IP , Logon Process , Workstation Name , Logon Type , Device Name , Original Log ) as columns. ![APT-Hunter Success and Failed logon Report ](Samples/Sample_Logon_Events.csv) : All logon events with parsed fields (Date, User , Source IP , Logon Process , Workstation Name , Logon Type , Device Name , Original Log ) as columns.
![APT-Hunter Process Execution Report ](Samples/Sample_Process_Execution_Events.csv) : all process execution captured from the event logs. ![APT-Hunter Process Execution Report ](Samples/Sample_Process_Execution_Events.csv) : all process execution captured from the event logs.
![APT-Hunter Object Access Report ](Samples/Sample_Object_Access_Events.csv) : all object access captured from Event (4663) . ![APT-Hunter Object Access Report ](Samples/Sample_Object_Access_Events.csv) : all object access captured from Event (4663) .
![APT-Hunter Collected SID Report ](Samples/Sample_Collected-SIDS.csv) : Collected Users with their SID list to help you in the investigation. ![APT-Hunter Collected SID Report ](Samples/Sample_Collected-SIDS.csv) : Collected Users with their SID list to help you in the investigation.
![APT-Hunter EventID Frequency Report ](Samples/EventID_Frequency_Analysis.xls) : EventID frequency analysis report. ![APT-Hunter EventID Frequency Report ](Samples/EventID_Frequency_Analysis.xls) : EventID frequency analysis report.
# Credits : # Credits :
I would like to thank [Joe McCray](https://www.linkedin.com/in/joemccray/) for his amazing contribution in Sysmon use cases ( more than 100 use cases added by Joe ) I would like to thank [Joe McCray](https://www.linkedin.com/in/joemccray/) for his amazing contribution in Sysmon use cases ( more than 100 use cases added by Joe )

@ -1,13 +1,13 @@
Banner=""" Banner="""
/$$$$$$ /$$$$$$$ /$$$$$$$$ /$$ /$$ /$$ /$$$$$$ /$$$$$$$ /$$$$$$$$ /$$ /$$ /$$
/$$__ $$| $$__ $$|__ $$__/ | $$ | $$ | $$ /$$__ $$| $$__ $$|__ $$__/ | $$ | $$ | $$
| $$ \ $$| $$ \ $$ | $$ | $$ | $$ /$$ /$$ /$$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$ | $$ \ $$| $$ \ $$ | $$ | $$ | $$ /$$ /$$ /$$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$
| $$$$$$$$| $$$$$$$/ | $$ /$$$$$$| $$$$$$$$| $$ | $$| $$__ $$|_ $$_/ /$$__ $$ /$$__ $$ | $$$$$$$$| $$$$$$$/ | $$ /$$$$$$| $$$$$$$$| $$ | $$| $$__ $$|_ $$_/ /$$__ $$ /$$__ $$
| $$__ $$| $$____/ | $$ |______/| $$__ $$| $$ | $$| $$ \ $$ | $$ | $$$$$$$$| $$ \__/ | $$__ $$| $$____/ | $$ |______/| $$__ $$| $$ | $$| $$ \ $$ | $$ | $$$$$$$$| $$ \__/
| $$ | $$| $$ | $$ | $$ | $$| $$ | $$| $$ | $$ | $$ /$$| $$_____/| $$ | $$ | $$| $$ | $$ | $$ | $$| $$ | $$| $$ | $$ | $$ /$$| $$_____/| $$
| $$ | $$| $$ | $$ | $$ | $$| $$$$$$/| $$ | $$ | $$$$/| $$$$$$$| $$ | $$ | $$| $$ | $$ | $$ | $$| $$$$$$/| $$ | $$ | $$$$/| $$$$$$$| $$
|__/ |__/|__/ |__/ |__/ |__/ \______/ |__/ |__/ \___/ \_______/|__/ |__/ |__/|__/ |__/ |__/ |__/ \______/ |__/ |__/ \___/ \_______/|__/
By : Ahmed Khlief , @ahmed_khlief By : Ahmed Khlief , @ahmed_khlief
Version : 3.3 Version : 3.3
""" """

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

@ -1,72 +1,72 @@
import csv import csv
import re import re
from netaddr import * from netaddr import *
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
import pandas as pd import pandas as pd
from datetime import datetime , timezone from datetime import datetime , timezone
from evtx import PyEvtxParser from evtx import PyEvtxParser
from dateutil.parser import parse from dateutil.parser import parse
from dateutil.parser import isoparse from dateutil.parser import isoparse
from pytz import timezone from pytz import timezone
# NOTE(review): minlength appears unused within this module — confirm callers before removing.
minlength = 1000

# Accumulator for matched events: a single dict of parallel column lists,
# filled by Evtx_hunt() and consumed by hunt_report().
Hunting_events = [{'Date and Time': [], 'timestamp': [], 'Channel': [], 'Computer': [], 'Event ID': [], 'Original Event Log': []}]

# Raw strings fix the invalid '\/' escape sequence of the original
# (DeprecationWarning in modern Python); '/' needs no escaping in a regex.
EventID_rex = re.compile(r'<EventID.*>(.*)</EventID>', re.IGNORECASE)
Channel_rex = re.compile(r'<Channel.*>(.*)</Channel>', re.IGNORECASE)
Computer_rex = re.compile(r'<Computer.*>(.*)</Computer>', re.IGNORECASE)
def Evtx_hunt(files, str_regexes, eid, input_timzone, output, timestart, timeend):
    """Search EVTX files for records matching any of the given regexes.

    Matches are appended to the module-global Hunting_events accumulator and
    finally written out via hunt_report().

    Args:
        files: iterable of EVTX file paths.
        str_regexes: list of regex strings matched (case-insensitively) against raw record XML.
        eid: optional Event ID filter; records with a different ID are skipped.
        input_timzone: tzinfo used to localize record timestamps.
        output: output file prefix passed to hunt_report().
        timestart / timeend: optional epoch-seconds window; records outside it are skipped.
    """
    for file in files:
        file = str(file)
        print("Analyzing "+file)
        try:
            parser = PyEvtxParser(file)
        except:
            print("Issue analyzing "+file +"\nplease check if its not corrupted")
            continue
        try:
            for record in parser.records():
                EventID = EventID_rex.findall(record['data'])
                if timestart is not None and timeend is not None:
                    timestamp = datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat()))
                    if not (timestamp > timestart and timestamp < timeend):
                        # BUG FIX: the original used 'return' here, which aborted the
                        # entire hunt (remaining records AND files) on the first
                        # out-of-window record. Skip only this record instead.
                        continue
                if len(EventID) > 0:
                    if eid is not None and EventID[0] != eid:
                        continue
                    Computer = Computer_rex.findall(record['data'])
                    Channel = Channel_rex.findall(record['data'])
                    if len(Channel) > 0:
                        channel = Channel[0]
                    else:
                        channel = " "
                    for str_regex in str_regexes:
                        rex = re.compile(str_regex, re.IGNORECASE)
                        if rex.findall(record['data']):
                            Hunting_events[0]['timestamp'].append(datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat())))
                            Hunting_events[0]['Date and Time'].append(parse(record["timestamp"]).astimezone(input_timzone).isoformat())
                            Hunting_events[0]['Channel'].append(channel)
                            Hunting_events[0]['Event ID'].append(EventID[0])
                            # Guard against records with no <Computer> element (was an IndexError).
                            Hunting_events[0]['Computer'].append(Computer[0] if Computer else " ")
                            Hunting_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " ").replace("\n", " "))
        except Exception as e:
            # BUG FIX: the original concatenated print(e) (which returns None)
            # into the message, raising TypeError; use str(e).
            print("issue searching log : " + record['data'] + "\n Error : " + str(e))
    hunt_report(output)
def hunt_report(output):
    """Write all accumulated hunting matches to '<output>_hunting.csv'.

    Reads the module-global Hunting_events accumulator filled by Evtx_hunt()
    and prints how many matching events were found.
    """
    global Hunting_events
    matched = Hunting_events[0]
    print("Found " + str(len(matched["timestamp"])) + " Events")
    report = pd.DataFrame(matched)
    report.to_csv(output + "_hunting.csv", index=False)

@ -1,321 +1,321 @@
import json import json
import sqlite3 import sqlite3
import tempfile import tempfile
import os import os
import time import time
import pandas as pd import pandas as pd
import geoip2.database import geoip2.database
import requests import requests
from dateutil import parser, tz from dateutil import parser, tz
import pandas as pd import pandas as pd
import json import json
import csv import csv
from pathlib import Path from pathlib import Path
# Analysis time-window boundaries (epoch seconds); presumably set by the CLI
# layer before the queries run — TODO confirm against caller.
start_time=0 start_time=0
end_time=0 end_time=0
# Flags users with more than 5 failed logins from more than 3 distinct client
# IPs within the same hour — the classic password-spray pattern.
# NOTE(review): the selected LoginDate is an arbitrary row from each group;
# confirm that is acceptable for reporting.
password_spray_query = ''' password_spray_query = '''
WITH FailedLogins AS ( WITH FailedLogins AS (
SELECT SELECT
UserId, UserId,
ClientIP, ClientIP,
datetime(CreationTime) AS LoginDate datetime(CreationTime) AS LoginDate
FROM FROM
events events
WHERE WHERE
Operation = 'UserLoginFailed' Operation = 'UserLoginFailed'
) )
SELECT SELECT
UserId, UserId,
GROUP_CONCAT(ClientIP, ', ') AS ClientIPs, GROUP_CONCAT(ClientIP, ', ') AS ClientIPs,
COUNT(DISTINCT ClientIP) AS UniqueIPCount, COUNT(DISTINCT ClientIP) AS UniqueIPCount,
COUNT(*) AS FailedLoginAttempts, COUNT(*) AS FailedLoginAttempts,
LoginDate LoginDate
FROM FROM
FailedLogins FailedLogins
GROUP BY GROUP BY
UserId, UserId,
strftime('%Y-%m-%d %H', LoginDate) strftime('%Y-%m-%d %H', LoginDate)
HAVING HAVING
COUNT(*) > 5 AND UniqueIPCount > 3 COUNT(*) > 5 AND UniqueIPCount > 3
ORDER BY ORDER BY
FailedLoginAttempts DESC; FailedLoginAttempts DESC;
''' '''
# Per-user, per-day totals of login attempts split into successes and failures.
user_logon_query = ''' user_logon_query = '''
SELECT SELECT
UserId, UserId,
date(CreationTime) AS LoginDate, date(CreationTime) AS LoginDate,
COUNT(*) AS TotalLoginAttempts, COUNT(*) AS TotalLoginAttempts,
SUM(CASE WHEN Operation = 'UserLoggedIn' THEN 1 ELSE 0 END) AS SuccessfulLogins, SUM(CASE WHEN Operation = 'UserLoggedIn' THEN 1 ELSE 0 END) AS SuccessfulLogins,
SUM(CASE WHEN Operation = 'UserLoginFailed' THEN 1 ELSE 0 END) AS FailedLogins SUM(CASE WHEN Operation = 'UserLoginFailed' THEN 1 ELSE 0 END) AS FailedLogins
FROM FROM
events events
where where
Operation = 'UserLoggedIn' OR Operation = 'UserLoginFailed' Operation = 'UserLoggedIn' OR Operation = 'UserLoginFailed'
GROUP BY GROUP BY
UserId, UserId,
LoginDate LoginDate
ORDER BY ORDER BY
LoginDate, LoginDate,
UserId; UserId;
''' '''
# Count and comma-joined list of the distinct operations each user has ever
# performed, most active users first.
User_operations_query = ''' User_operations_query = '''
SELECT SELECT
UserId, UserId,
COUNT(DISTINCT Operation) AS OperationCount, COUNT(DISTINCT Operation) AS OperationCount,
GROUP_CONCAT(Operation, ', ') AS UniqueOperations GROUP_CONCAT(Operation, ', ') AS UniqueOperations
FROM FROM
(SELECT DISTINCT UserId, Operation FROM events) (SELECT DISTINCT UserId, Operation FROM events)
GROUP BY GROUP BY
UserId UserId
ORDER BY ORDER BY
OperationCount DESC; OperationCount DESC;
''' '''
# Distinct operations per user per calendar day, busiest (user, day) pairs first.
user_operation_by_day_query = ''' user_operation_by_day_query = '''
SELECT SELECT
UserId, UserId,
DATE(CreationTime) AS OperationDate, DATE(CreationTime) AS OperationDate,
COUNT(DISTINCT Operation) AS OperationCount, COUNT(DISTINCT Operation) AS OperationCount,
GROUP_CONCAT( Operation, ', ') AS UniqueOperations GROUP_CONCAT( Operation, ', ') AS UniqueOperations
FROM FROM
events events
GROUP BY GROUP BY
UserId, UserId,
OperationDate OperationDate
ORDER BY ORDER BY
OperationCount DESC OperationCount DESC
''' '''
def convert_csv(input_file, temp):
    """Extract the AuditData JSON column of a UAL CSV export into a JSON-lines file.

    Args:
        input_file: path to the CSV export; must contain an 'AuditData' column
            whose cells hold JSON-formatted strings.
        temp: directory in which to write the intermediate file.

    Returns:
        Path to the JSON-lines file ('audit_data.json' inside `temp`).

    Raises:
        json.JSONDecodeError: if a non-empty AuditData cell is not valid JSON.
    """
    json_file = os.path.join(temp, 'audit_data.json')
    with open(input_file, 'r', encoding='utf-8') as csv_file:
        reader = csv.DictReader(csv_file)
        with open(json_file, 'w', encoding='utf-8') as jsonl_file:
            for row in reader:
                raw = row.get('AuditData')
                # Robustness fix: rows with a missing or empty AuditData cell
                # previously raised (KeyError / JSONDecodeError); skip them.
                if not raw:
                    continue
                # Round-trip through json to normalize formatting to one line.
                json_data = json.loads(raw)
                jsonl_file.write(json.dumps(json_data) + '\n')
    return json_file
def flatten_json_file(input_file, timezone, chunk_size=10000):
    """Load a JSON Lines file into a single flattened pandas DataFrame.

    Records are normalised in chunks of ``chunk_size`` lines; any
    'CreationTime' field is converted to ``timezone`` (naive timestamps are
    assumed to be UTC).

    Args:
        input_file: path to the JSON Lines file (one JSON object per line).
        timezone: tzinfo object used for the CreationTime conversion.
        chunk_size: number of lines normalised per pandas chunk.

    Returns:
        DataFrame with nested JSON keys flattened to dotted column names;
        an empty DataFrame if the input file contains no records.
    """
    chunks = []
    with open(input_file, 'r') as file:
        lines = file.readlines()
    for i in range(0, len(lines), chunk_size):
        chunk = [json.loads(line) for line in lines[i:i + chunk_size]]
        for record in chunk:
            if 'CreationTime' in record:
                creation_time = parser.parse(record['CreationTime'])
                # Assume UTC when the timestamp carries no timezone info.
                if creation_time.tzinfo is None:
                    creation_time = creation_time.replace(tzinfo=tz.tzutc())
                record['CreationTime'] = creation_time.astimezone(timezone).isoformat()
        chunks.append(pd.json_normalize(chunk))
    # pd.concat raises ValueError on an empty list (the original crashed on
    # an empty input file); return an empty frame instead.
    if not chunks:
        return pd.DataFrame()
    return pd.concat(chunks, ignore_index=True)
def create_sqlite_db_from_dataframe(dataframe, db_name):
    """Write ``dataframe`` to the 'events' table of the SQLite DB at ``db_name``.

    Every value is stored as TEXT so the detection SQL can compare fields
    uniformly; an existing 'events' table is replaced.

    Args:
        dataframe: pandas DataFrame of flattened audit events.
        db_name: filesystem path of the SQLite database to (re)create.
    """
    conn = sqlite3.connect(db_name)
    try:
        # Convert all columns to string so the table schema is uniform TEXT.
        dataframe = dataframe.astype(str)
        dataframe.to_sql('events', conn, if_exists='replace', index=False,
                         dtype={col_name: 'TEXT' for col_name in dataframe.columns})
    finally:
        # Close even if the write fails so the DB file is not left locked
        # (the original leaked the connection on any to_sql error).
        conn.close()
def read_detection_rules(rule_file):
    """Load and return the JSON detection-rule definitions from ``rule_file``."""
    with open(rule_file, 'r') as handle:
        return json.load(handle)
def apply_detection_logic_sqlite(db_name, rules):
    """Run every detection rule's SQL query against the events database.

    Args:
        db_name: path to the SQLite database containing the 'events' table.
        rules: iterable of dicts with 'name', 'severity' and 'query' keys.

    Returns:
        DataFrame of all matched events tagged with 'RuleName' and
        'Severity' columns; an empty DataFrame when nothing matched.
    """
    conn = sqlite3.connect(db_name)
    all_detected_events = []
    try:
        for rule in rules:
            detected_events = pd.read_sql_query(rule['query'], conn)
            detected_events['RuleName'] = rule['name']
            detected_events['Severity'] = rule['severity']
            all_detected_events.append(detected_events)
    finally:
        # Release the connection even if a rule's SQL is invalid (the
        # original leaked it on any read_sql_query error).
        conn.close()
    if all_detected_events:
        return pd.concat(all_detected_events, ignore_index=True)
    return pd.DataFrame()
def download_geolite_db(geolite_db_path):
    """Download the GeoLite2 country database and save it to ``geolite_db_path``.

    Args:
        geolite_db_path: destination file path for the .mmdb database.

    Raises:
        requests.HTTPError: if the server answers with an error status.
        requests.RequestException: on network failure or timeout.
    """
    url = "https://git.io/GeoLite2-Country.mmdb"
    print(f"Downloading GeoLite2 database from {url}...")
    # A timeout keeps a stalled download from hanging the whole analysis
    # (the original requests.get had no timeout and could block forever).
    response = requests.get(url, timeout=60)
    response.raise_for_status()  # Check if the download was successful
    with open(geolite_db_path, 'wb') as file:
        file.write(response.content)
    print(f"GeoLite2 database downloaded and saved to {geolite_db_path}")
def get_country_from_ip(ip, reader):
    """Resolve an IP address to a country name via the GeoLite2 reader.

    Returns 'Unknown' when the lookup fails for any reason (private or
    malformed address, reader error, etc.).
    """
    try:
        return reader.country(ip).country.name
    except Exception:
        # Unresolvable addresses are expected; fall back silently.
        return 'Unknown'
def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data=False,
                  geolite_db_path='GeoLite2-Country.mmdb'):
    """End-to-end Office 365 Unified Audit Log analysis.

    Pipeline: CSV -> JSON Lines -> flattened DataFrame -> SQLite ('events'
    table) -> detection rules + statistics -> one multi-sheet XLSX report
    named '<output>_o365_report.xlsx'.

    Args:
        auditfile: path to the UAL CSV export (must contain 'AuditData').
        rule_file: JSON rules file; defaults to 'O365_detection_rules.json'
            when None.
        output: report base name; defaults to the audit file name sans
            extension when None/empty.
        timezone: tzinfo used to convert each record's CreationTime.
        include_flattened_data: if True, also dump the raw flattened rows
            into 'Flattened Data N' sheets (65k rows per sheet).
        geolite_db_path: GeoLite2 .mmdb path; downloaded if missing.

    Notes:
        - Any exception is caught and printed, not re-raised; callers cannot
          detect failure from the return value (always None).
        - The '.temp' working directory is removed in the finally block.
        - NOTE(review): relies on module-level query strings and helpers
          (user_logon_query, password_spray_query, User_operations_query,
          convert_csv, geoip2 ...) defined elsewhere in this file.
    """
    start_time = time.time()
    temp_dir = ".temp"
    if output is None or output == "":
        output = os.path.splitext(auditfile)[0]
    try:
        # Create necessary directories
        os.makedirs(output, exist_ok=True)
        os.makedirs(temp_dir, exist_ok=True)
        # Check if the GeoLite2 database exists, and download it if not
        if not os.path.exists(geolite_db_path):
            download_geolite_db(geolite_db_path)
        # Convert CSV to JSON (assuming convert_csv is a valid function that you have)
        json_file = convert_csv(auditfile, temp_dir)
        # Input and output file paths
        input_file = json_file
        db_name = os.path.join(temp_dir, 'audit_data.db')
        if rule_file is None:
            rule_file = 'O365_detection_rules.json'
        output_file = f"{output}_o365_report.xlsx"
        # Measure the start time
        # Flatten the JSON file
        flattened_df = flatten_json_file(input_file, timezone)
        # Create SQLite database from the flattened DataFrame
        create_sqlite_db_from_dataframe(flattened_df, db_name)
        # Open the GeoLite2 database
        with geoip2.database.Reader(geolite_db_path) as reader:
            # Resolve ClientIP to country names
            if 'ClientIP' in flattened_df.columns:
                flattened_df['Country'] = flattened_df['ClientIP'].apply(lambda ip: get_country_from_ip(ip, reader))
        # Read detection rules
        rules = read_detection_rules(rule_file)
        # Apply detection logic using SQLite
        detected_events = apply_detection_logic_sqlite(db_name, rules)
        # Reorder columns to make RuleName the first column
        if not detected_events.empty:
            columns = ['RuleName', 'Severity'] + [col for col in detected_events.columns if
                                                  col not in ['RuleName', 'Severity']]
            detected_events = detected_events[columns]
        # Perform the brute-force detection query
        conn = sqlite3.connect(db_name)
        try:
            user_login_tracker_df = pd.read_sql_query(user_logon_query, conn)
            password_spray_df = pd.read_sql_query(password_spray_query, conn)
            user_operations_df = pd.read_sql_query(User_operations_query, conn)
            user_operation_by_day_df = pd.read_sql_query(user_operation_by_day_query, conn)
        finally:
            conn.close()
        # Create a new workbook with the detection results
        with pd.ExcelWriter(output_file, engine='xlsxwriter') as writer:
            if include_flattened_data:
                # Split the flattened data into multiple sheets if needed
                max_rows_per_sheet = 65000
                num_sheets = len(flattened_df) // max_rows_per_sheet + 1
                for i in range(num_sheets):
                    start_row = i * max_rows_per_sheet
                    end_row = (i + 1) * max_rows_per_sheet
                    sheet_name = f'Flattened Data {i + 1}'
                    flattened_df.iloc[start_row:end_row].to_excel(writer, sheet_name=sheet_name, index=False)
            # Write statistics for various fields
            detected_events.to_excel(writer, sheet_name='Detection Results', index=False)
            user_login_tracker_df.to_excel(writer, sheet_name='User Login Tracker', index=False)
            password_spray_df.to_excel(writer, sheet_name='Password Spray Attacks', index=False)
            user_operations_df.to_excel(writer, sheet_name='User Operations', index=False)
            user_operation_by_day_df.to_excel(writer, sheet_name='User Operations by Day', index=False)
            flattened_df['Operation'].value_counts().to_frame().to_excel(writer, sheet_name='Operation Stats')
            flattened_df['ClientIP'].value_counts().to_frame().to_excel(writer, sheet_name='ClientIP Stats')
            flattened_df['Country'].value_counts().to_frame().to_excel(writer, sheet_name='Country Stats')
            flattened_df['UserAgent'].value_counts().to_frame().to_excel(writer, sheet_name='UserAgent Stats')
            flattened_df['UserId'].value_counts().to_frame().to_excel(writer, sheet_name='UserId Stats')
            flattened_df['AuthenticationType'].value_counts().to_frame().to_excel(writer,
                                                                                  sheet_name='AuthenticationType Stats')
        # Measure the end time
        end_time = time.time()
        print(f"Office365 analysis finished in time: {end_time - start_time:.2f} seconds")
    except Exception as e:
        print(f"An error occurred during the analysis: {e}")
    finally:
        # Clean up the temporary directory
        if os.path.exists(temp_dir):
            for file in Path(temp_dir).glob('*'):
                file.unlink()  # Delete the file
            os.rmdir(temp_dir)  # Remove the directory
    # Write the User Login Tracker results to a new sheet
    # Measure the end time
    end_time = time.time()
    # Calculate and print the running time
    running_time = end_time - start_time
    print(f"Office365 hunter finished in time: {running_time:.2f} seconds")

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 9.9 KiB

After

Width:  |  Height:  |  Size: 9.9 KiB

@ -1,11 +1,11 @@
evtx
netaddr
numpy
pandas
python-dateutil
pytz
six
XlsxWriter
flatten_json
geoip2
requests

File diff suppressed because one or more lines are too long

@ -1,19 +1,19 @@
User,SID
01566S-WIN16-IR$,S-1-5-18
ANONYMOUS LOGON,S-1-5-7
IEUser,S-1-5-21-3461203602-4096304019-2269080069-1000
Administrator,S-1-5-21-308926384-506822093-3341789130-500
samir,S-1-5-21-308926384-506822093-3341789130-220106
02694W-WIN10$,S-1-5-21-308926384-506822093-3341789130-84104
Administrator,S-1-5-21-81107902-1099128984-1836738286-500
EXCHANGE$,S-1-5-21-2895268558-4179327395-2773671012-1108
IEUser,S-1-5-21-3583694148-1414552638-2922671848-1000
lgrove,S-1-5-21-308926384-506822093-3341789130-101606
a-jbrown,S-1-5-21-308926384-506822093-3341789130-1106
user01,S-1-5-21-1587066498-1489273250-1035260531-1106
Administrator,S-1-5-21-1587066498-1489273250-1035260531-500
Administrator,S-1-5-21-1587066498-1489273250-1035260531-500
sshd_server,S-1-5-21-3583694148-1414552638-2922671848-1002
LOCAL SERVICE,S-1-5-19
NETWORK SERVICE,S-1-5-20
admin01,S-1-5-21-1587066498-1489273250-1035260531-1108
1 User SID
2 01566S-WIN16-IR$ S-1-5-18
3 ANONYMOUS LOGON S-1-5-7
4 IEUser S-1-5-21-3461203602-4096304019-2269080069-1000
5 Administrator S-1-5-21-308926384-506822093-3341789130-500
6 samir S-1-5-21-308926384-506822093-3341789130-220106
7 02694W-WIN10$ S-1-5-21-308926384-506822093-3341789130-84104
8 Administrator S-1-5-21-81107902-1099128984-1836738286-500
9 EXCHANGE$ S-1-5-21-2895268558-4179327395-2773671012-1108
10 IEUser S-1-5-21-3583694148-1414552638-2922671848-1000
11 lgrove S-1-5-21-308926384-506822093-3341789130-101606
12 a-jbrown S-1-5-21-308926384-506822093-3341789130-1106
13 user01 S-1-5-21-1587066498-1489273250-1035260531-1106
14 Administrator S-1-5-21-1587066498-1489273250-1035260531-500
15 Administrator S-1-5-21-1587066498-1489273250-1035260531-500
16 sshd_server S-1-5-21-3583694148-1414552638-2922671848-1002
17 LOCAL SERVICE S-1-5-19
18 NETWORK SERVICE S-1-5-20
19 admin01 S-1-5-21-1587066498-1489273250-1035260531-1108

File diff suppressed because one or more lines are too long

Before

Width:  |  Height:  |  Size: 10 KiB

After

Width:  |  Height:  |  Size: 10 KiB

Before

Width:  |  Height:  |  Size: 236 KiB

After

Width:  |  Height:  |  Size: 236 KiB

Before

Width:  |  Height:  |  Size: 86 KiB

After

Width:  |  Height:  |  Size: 86 KiB

Before

Width:  |  Height:  |  Size: 70 KiB

After

Width:  |  Height:  |  Size: 70 KiB

Before

Width:  |  Height:  |  Size: 222 KiB

After

Width:  |  Height:  |  Size: 222 KiB

@ -1,101 +1,101 @@
# Collect common Windows event logs to CSV files under .\wineventlog, then
# compress the folder to .\logs.zip.
# Fix: cmdlet failures are non-terminating errors by default, so the original
# try/catch blocks never fired; -ErrorAction Stop makes them terminating.

# Export a single event log to CSV, printing a message on failure.
function Export-LogCsv {
    param(
        [string]$LogName,   # event log name as accepted by Get-WinEvent
        [string]$OutFile    # destination CSV path
    )
    try {
        Get-WinEvent -LogName $LogName -ErrorAction Stop | export-csv $OutFile
    }
    catch {
        echo "Can't retrieve $LogName Logs"
    }
}

try {
    New-Item -ItemType "directory" -Path "wineventlog" -ErrorAction Stop
}
catch {
    echo "can't create a new directory"
}

Export-LogCsv -LogName "Security" -OutFile "wineventlog/Security.csv"
Export-LogCsv -LogName "System" -OutFile "wineventlog/System.csv"
Export-LogCsv -LogName "Application" -OutFile "wineventlog/Application.csv"
Export-LogCsv -LogName "Windows PowerShell" -OutFile "wineventlog/Windows_PowerShell.csv"
Export-LogCsv -LogName "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" -OutFile "wineventlog/LocalSessionManager.csv"
Export-LogCsv -LogName "Microsoft-Windows-Windows Defender/Operational" -OutFile "wineventlog/Windows_Defender.csv"
Export-LogCsv -LogName "Microsoft-Windows-TaskScheduler/Operational" -OutFile "wineventlog/TaskScheduler.csv"
Export-LogCsv -LogName "Microsoft-Windows-WinRM/Operational" -OutFile "wineventlog/WinRM.csv"
Export-LogCsv -LogName "Microsoft-Windows-Sysmon/Operational" -OutFile "wineventlog/Sysmon.csv"
Export-LogCsv -LogName "Microsoft-Windows-PowerShell/Operational" -OutFile "wineventlog/Powershell_Operational.csv"

try {
    Compress-Archive -Path wineventlog -DestinationPath ./logs.zip -ErrorAction Stop
}
catch {
    # Original message had a doubled word ("the the"); fixed.
    echo "couldn't compress the log folder "
}

@ -1,101 +1,101 @@
try{ try{
New-Item -ItemType "directory" -Path "wineventlog" New-Item -ItemType "directory" -Path "wineventlog"
} }
catch catch
{ {
echo "can't create a new directory" echo "can't create a new directory"
} }
try{ try{
wevtutil epl Security wineventlog/Security.evtx wevtutil epl Security wineventlog/Security.evtx
} }
catch catch
{ {
echo "Can't retrieve Security Logs" echo "Can't retrieve Security Logs"
} }
try try
{ {
wevtutil epl System wineventlog/System.evtx wevtutil epl System wineventlog/System.evtx
} }
catch catch
{ {
echo "Can't retrieve System Logs" echo "Can't retrieve System Logs"
} }
try{ try{
wevtutil epl Application wineventlog/Application.evtx wevtutil epl Application wineventlog/Application.evtx
} }
catch catch
{ {
echo "Can't retrieve Application Logs" echo "Can't retrieve Application Logs"
} }
try{ try{
wevtutil epl "Windows PowerShell" wineventlog/Windows_PowerShell.evtx wevtutil epl "Windows PowerShell" wineventlog/Windows_PowerShell.evtx
} }
catch catch
{ {
echo "Can't retrieve Windows PowerShell Logs" echo "Can't retrieve Windows PowerShell Logs"
} }
try{ try{
wevtutil epl "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" wineventlog/LocalSessionManager.evtx wevtutil epl "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" wineventlog/LocalSessionManager.evtx
} }
catch catch
{ {
echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs" echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
} }
try{ try{
wevtutil epl "Microsoft-Windows-Windows Defender/Operational" wineventlog/Windows_Defender.evtx wevtutil epl "Microsoft-Windows-Windows Defender/Operational" wineventlog/Windows_Defender.evtx
} }
catch catch
{ {
echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs" echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
} }
try{ try{
wevtutil epl Microsoft-Windows-TaskScheduler/Operational wineventlog/TaskScheduler.evtx wevtutil epl Microsoft-Windows-TaskScheduler/Operational wineventlog/TaskScheduler.evtx
} }
catch catch
{ {
echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs" echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
} }
try{ try{
wevtutil epl Microsoft-Windows-WinRM/Operational wineventlog/WinRM.evtx wevtutil epl Microsoft-Windows-WinRM/Operational wineventlog/WinRM.evtx
} }
catch catch
{ {
echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs" echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
} }
try{ try{
wevtutil epl Microsoft-Windows-Sysmon/Operational wineventlog/Sysmon.evtx wevtutil epl Microsoft-Windows-Sysmon/Operational wineventlog/Sysmon.evtx
} }
catch catch
{ {
echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs" echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
} }
try{ try{
wevtutil epl Microsoft-Windows-PowerShell/Operational wineventlog/Powershell_Operational.evtx wevtutil epl Microsoft-Windows-PowerShell/Operational wineventlog/Powershell_Operational.evtx
} }
catch catch
{ {
echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs" echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
} }
try try
{ {
Compress-Archive -Path wineventlog -DestinationPath ./logs.zip Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
} }
catch catch
{ {
echo "couldn't compress the the log folder " echo "couldn't compress the the log folder "
} }
Loading…
Cancel
Save