Compare commits

No commits in common. 'main' and 'morry' have entirely different histories.
main ... morry

@@ -1,6 +1,3 @@
# apt-hunter
- Create a new empty folder locally, then right-click and use TortoiseGit to clone the repository into it.
- When the file you upload sits two or more directory levels deep, be aware that the '/' in the upload path is translated in the URL to '%2F', which will create a new folder! Be sure to pay attention to this (a short encoding sketch follows below).
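A quick, self-contained illustration of the percent-encoding behaviour described above (plain Python for reference; the example path is made up):

```python
from urllib.parse import quote

# With no characters marked as safe, '/' is percent-encoded to '%2F'
print(quote("dir1/dir2/file.txt", safe=""))  # -> dir1%2Fdir2%2Ffile.txt

# By default quote() treats '/' as safe and leaves it alone
print(quote("dir1/dir2/file.txt"))           # -> dir1/dir2/file.txt
```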

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -1,15 +1,15 @@
#!/bin/bash
if [ "$#" -ne 1 ]; then
echo "Please enter rules path as argument "
exit 1
fi
echo "Getting Sigma Converter Toot"
git clone https://github.com/SigmaHQ/legacy-sigmatools.git
echo "Converting sigma rules "
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d $1 -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status
echo "Rules created with file name : rules.json "

@@ -1,11 +1,11 @@
#!/bin/bash
echo "Getting Sigma Converter Toot"
git clone https://github.com/SigmaHQ/legacy-sigmatools.git
echo "Getting Sigma Rules"
git clone https://github.com/SigmaHQ/sigma.git
echo "Converting sigma rules "
legacy-sigmatools/tools/sigmac --recurse --target sqlite --backend-option table=Events --output-format json -d sigma/rules/windows/ -c lib/config/sigma-converter-rules-config.yml -o rules.json --output-fields title,id,description,author,tags,level,falsepositives,filename,status
echo "Rules created with file name : rules.json "

File diff suppressed because it is too large

@@ -1,99 +1,99 @@
[
{
"name": "Suspicious User Agent",
"severity": "High",
"query": "SELECT * FROM events WHERE UserAgent LIKE '%python%' OR UserAgent LIKE '%ruler%' OR UserAgent LIKE '%curl%' OR UserAgent LIKE '%Wget%' OR UserAgent LIKE '%python-requests%' OR UserAgent LIKE '%AADInternals%' OR UserAgent LIKE '%azurehound%' OR UserAgent LIKE '%axios%' OR UserAgent LIKE '%BAV2ROPC%' "
},
{
"name": "User adding or removing Inbox Rule",
"severity": "Medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%InboxRule%' OR Operation LIKE 'Set-Mailbox' OR Operation LIKE '%DeliverToMailboxAndForward%' OR Operation LIKE '%ForwardingAddress%' OR Operation LIKE '%ForwardingAddress%' "
},
{
"name": "After Hours Activity",
"severity": "Medium",
"query": "SELECT * FROM events WHERE (CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END >= 20 OR CASE WHEN CAST(substr(CreationTime, 12, 2) AS INTEGER) < 0 THEN 24 + (CAST(substr(CreationTime, 12, 2) AS INTEGER)) ELSE CAST(substr(CreationTime, 12, 2) AS INTEGER) END < 6) AND NOT (Operation LIKE 'File%' OR Operation LIKE 'List%' OR Operation LIKE 'Page%' OR Operation LIKE '%UserLogin%');"
},
{
"name": "Possible file exfiltration",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%FileUploaded%' "
},
{
"name": "Admin searching in emails of other users",
"severity": "Low",
"query": "SELECT * FROM events WHERE Operation LIKE '%SearchStarted%' OR Operation LIKE '%SearchExportDownloaded%' OR Operation LIKE '%ViewedSearchExported%' "
},
{
"name": "Strong Authentication Disabled",
"severity": "medium",
"query": "SELECT * FROM events WHERE Operation LIKE '%disable strong authentication%'"
},
{
"name": "User added to admin group",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add member to group%' AND ModifiedProperties Like '%admin%') OR ( Operation LIKE '%AddedToGroup%' AND TargetUserOrGroupName Like '%admin%') "
},
{
"name": "New Policy created",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%add policy%' ) "
},
{
"name": "Security Alert triggered",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AlertTriggered%' AND NOT Severity Like '%Low%') "
},
{
"name": "Transport rules ( mail flow rules ) modified",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%TransportRule%') "
},
{
"name": "An application was registered in Azure AD",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add service principal.%') "
},
{
"name": "Add app role assignment grant to user",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Add app role assignment grant to user.%') "
},
{
"name": "eDiscovery Abuse",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%New-ComplianceSearch%') "
},
{
"name": "Operations affecting OAuth Applications",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Add application.' OR Operation = 'Update application' OR Operation = 'Add service principal.' OR Operation = 'Update application Certificates and secrets management' OR Operation = 'Update applicationUpdate service principal.' OR Operation = 'Add app role assignment grant to user.' OR Operation = 'Add delegated permission grant.' OR Operation = 'Add owner to application.' OR Operation = 'Add owner to service principal.') "
},
{
"name": "Suspicious Operations affecting Mailbox ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'Set-MailboxJunkEmailConfiguration' OR Operation = 'SoftDelete' OR Operation = 'SendAs' OR Operation = 'HardDelete' OR Operation = 'MoveToDeletedItems' ) "
},
{
"name": "Suspicious Operations affecting SharePoint ",
"severity": "Medium",
"query": "SELECT * FROM events WHERE ( Operation = 'AddedToSecureLink' OR Operation = 'SearchQueryPerformed' OR Operation = 'SecureLinkCreated' OR Operation = 'SecureLinkUpdated' OR Operation = 'SharingInvitationCreated' ) "
},
{
"name": "User Modifying RetentionPolicy ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%UnifiedAuditLogRetentionPolicy%' ) "
},
{
"name": "User Modifying Audit Logging ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%AdminAuditLogConfig%' ) "
},
{
"name": "String Authentication Disabled ",
"severity": "High",
"query": "SELECT * FROM events WHERE ( Operation LIKE '%Disable Strong Authentication.%' ) "
}
]
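For reference, a minimal sketch of how rule entries like the ones above can be evaluated: each rule carries its own SQL query that is run against an `events` table in SQLite. This mirrors the `apply_detection_logic_sqlite` helper that appears later in this diff; the rules filename below assumes the default used by the Office 365 analyzer, and `audit_data.db` is an illustrative database path:

```python
import json
import sqlite3

import pandas as pd

with open("O365_detection_rules.json") as fh:      # rules file in the format shown above
    rules = json.load(fh)

conn = sqlite3.connect("audit_data.db")            # illustrative DB containing an 'events' table
hits = []
for rule in rules:
    df = pd.read_sql_query(rule["query"], conn)    # each rule ships its own SELECT statement
    df["RuleName"] = rule["name"]
    df["Severity"] = rule["severity"]
    hits.append(df)
conn.close()

report = pd.concat(hits, ignore_index=True) if hits else pd.DataFrame()
print(report.head())
```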

@@ -1,99 +1,99 @@
<p align="center">
<a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/v/release/ahmedkhlief/APT-Hunter?color=blue&label=Stable%20Version&style=flat""/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/downloads/ahmedkhlief/APT-Hunter/total?style=flat&label=GitHub Downloads&color=blue"/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/stargazers"><img src="https://img.shields.io/github/stars/ahmedkhlief/APT-Hunter?style=flat&label=GitHub Stars"/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/graphs/contributors"><img src="https://img.shields.io/github/contributors/ahmedkhlief/APT-Hunter?label=Contributors&color=blue&style=flat"/></a>
</p>
# APT-Hunter
APT-Hunter is a threat-hunting tool for Windows event logs, made with a purple-team mindset to detect APT movements hidden in the sea of Windows event logs and to decrease the time needed to uncover suspicious activity. APT-Hunter uses pre-defined detection rules and focuses on statistics to uncover abnormalities, which is very effective in compromise assessment. The output is produced as a timeline that can be analyzed directly in Excel, Timeline Explorer, Timesketch, etc.
Full information about the tool and how it is used can be found in this article: [introducing-apt-hunter-threat-hunting-tool-using-windows-event-log](https://shells.systems/introducing-apt-hunter-threat-hunting-tool-via-windows-event-log/)
New release info: [APT-HUNTER V3.0 : Rebuilt with Multiprocessing and new cool features](https://shells.systems/apt-hunter-v3-0-rebuilt-with-multiprocessing-and-new-cool-features/)
# Author
Twitter: [@ahmed_khlief](https://twitter.com/ahmed_khlief)
LinkedIn: [Ahmed Khlief](https://www.linkedin.com/in/ahmed-khlief-499321a7)
# Download APT-Hunter
Download the latest stable version of APT-Hunter with compiled binaries from the [Releases](https://github.com/ahmedkhlief/APT-Hunter/releases) page.
# How to Use APT-Hunter
APT-Hunter is built with Python 3, so you need to install the required libraries before using the tool.
`python3 -m pip install -r requirements.txt`
APT-Hunter is easy to use: just pass the -h argument to print the help and see the available options.
`python3 APT-Hunter.py -h`
![APT-Hunter Help](screenshots/APTHunter-Help.png)
![APT-Hunter Analyzing with all report ](screenshots/APTHunter-Allreport.png)
![APT-Hunter commandline output ](screenshots/APTHunter-output.png)
![APT-Hunter Excel Output ](screenshots/APTHunter-Excel.png)
![APT-Hunter CSV Output with Time Sketch](screenshots/APTHunter-Timeline-Explorer.png)
# Examples
Analyzing EVTX files: you can provide a directory containing the logs or a single file, and APT-Hunter will detect the type of logs.
`python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport`
Adding a time frame to focus on a specific timeline:
`python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport -start 2022-04-03 -end 2022-04-05T20:56`
Hunting using a string or regex:
`python3 APT-Hunter.py -hunt "psexec" -p /opt/wineventlogs/ -o Project2`
`python3 APT-Hunter.py -huntfile "(psexec|psexesvc)" -p /opt/wineventlogs/ -o Project2`
Hunting using a file that contains a list of regexes:
`python3 APT-Hunter.py -huntfile "huntfile.txt" -p /opt/wineventlogs/ -o Project2`
Hunting using Sigma rules:
`python3 APT-Hunter.py -sigma -rules rules.json -p /opt/wineventlogs/ -o Project2`
Getting the latest Sigma rules converted for APT-Hunter (the output will be a file named rules.json containing the rules from the [Sigma](https://github.com/SigmaHQ/sigma) repository):
`Get_Latest_Sigma_Rules.sh`
# Output Samples
![APT-Hunter CSV Output](Samples/Sample_TimeSketch.csv) : You can upload this CSV file to Timesketch to get a timeline analysis that will help you see the full picture of the attack.
![APT-Hunter Excel Output](Samples/Sample_Report.xlsx) : This Excel sheet includes all the events detected from every Windows log provided to APT-Hunter.
![APT-Hunter Success and Failed logon Report ](Samples/Sample_Logon_Events.csv) : All logon events with parsed fields (Date, User, Source IP, Logon Process, Workstation Name, Logon Type, Device Name, Original Log) as columns.
![APT-Hunter Process Execution Report ](Samples/Sample_Process_Execution_Events.csv) : All process executions captured from the event logs.
![APT-Hunter Object Access Report ](Samples/Sample_Object_Access_Events.csv) : All object access events captured from Event ID 4663.
![APT-Hunter Collected SID Report ](Samples/Sample_Collected-SIDS.csv) : Collected users with their SIDs to help you in the investigation.
![APT-Hunter EventID Frequency Report ](Samples/EventID_Frequency_Analysis.xls) : EventID frequency analysis report.
# Credits
I would like to thank [Joe McCray](https://www.linkedin.com/in/joemccray/) for his amazing contribution of Sysmon use cases (more than 100 use cases added by Joe).
<p align="center">
<a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/v/release/ahmedkhlief/APT-Hunter?color=blue&label=Stable%20Version&style=flat""/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/releases"><img src="https://img.shields.io/github/downloads/ahmedkhlief/APT-Hunter/total?style=flat&label=GitHub Downloads&color=blue"/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/stargazers"><img src="https://img.shields.io/github/stars/ahmedkhlief/APT-Hunter?style=flat&label=GitHub Stars"/></a>
<a href="https://github.com/ahmedkhlief/APT-Hunter/graphs/contributors"><img src="https://img.shields.io/github/contributors/ahmedkhlief/APT-Hunter?label=Contributors&color=blue&style=flat"/></a>
</p>
# APT-Hunter
APT-Hunter is Threat Hunting tool for windows event logs which made by purple team mindset to detect APT movements hidden in the sea of windows event logs to decrease the time to uncover suspicious activity . APT-Hunter use pre-defined detection rules and focus on statistics to uncover abnormalities which is very effective in compromise assessment . the output produced with timeline that can be analyzed directly from Excel , Timeline Explorer , Timesketch , etc...
Full information about the tool and how its used in this article : [introducing-apt-hunter-threat-hunting-tool-using-windows-event-log](https://shells.systems/introducing-apt-hunter-threat-hunting-tool-via-windows-event-log/)
New Release Info : [APT-HUNTER V3.0 : Rebuilt with Multiprocessing and new cool features](https://shells.systems/apt-hunter-v3-0-rebuilt-with-multiprocessing-and-new-cool-features/)
# Author
Twitter : [@ahmed_khlief](https://twitter.com/ahmed_khlief)
Linkedin : [Ahmed Khlief](https://www.linkedin.com/in/ahmed-khlief-499321a7)
# Donwload APT-Hunter :
Download the latest stable version of APT-Hunter with compiled binaries from [Releases](https://github.com/ahmedkhlief/APT-Hunter/releases) page.
# How to Use APT-Hunter
APT-Hunter built using python3 so in order to use the tool you need to install the required libraries.
`python3 -m pip install -r requirements.txt`
APT-Hunter is easy to use you just use the argument -h to print help to see the options needed .
` python3 APT-Hunter.py -h`
![APT-Hunter Help](screenshots/APTHunter-Help.png)
![APT-Hunter Analyzing with all report ](screenshots/APTHunter-Allreport.png)
![APT-Hunter commandline output ](screenshots/APTHunter-output.png)
![APT-Hunter Excel Output ](screenshots/APTHunter-Excel.png)
![APT-Hunter CSV Output with Time Sketch](screenshots/APTHunter-Timeline-Explorer.png)
# Exmaples :
Analyzing EVTX files , you can provide directory containing the logs or single file , APT hunter will detect the type of logs .
`python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport`
Adding time frame to focus on specific timeline :
`python3 APT-Hunter.py -p /opt/wineventlogs/ -o Project1 -allreport -start 2022-04-03 -end 2022-04-05T20:56`
Hunting using String or regex :
`python3 APT-Hunter.py -hunt "psexec" -p /opt/wineventlogs/ -o Project2`
`python3 APT-Hunter.py -huntfile "(psexec|psexesvc)" -p /opt/wineventlogs/ -o Project2`
hunting using file that contain list of regex :
`python3 APT-Hunter.py -huntfile "huntfile.txt)" -p /opt/wineventlogs/ -o Project2`
Hunting using sigma rules :
`python3 APT-Hunter.py -sigma -rules rules.json -p /opt/wineventlogs/ -o Project2`
Getting Latest sigma rules converted for APT-Hunter ( output will be a file with name rules.json that contain the rules from Sigma repository [Sigma](https://github.com/SigmaHQ/sigma) ):
Get_Latest_Sigma_Rules.sh
# Output Samples
![APT-Hunter CSV Output](Samples/Sample_TimeSketch.csv) : This CSV file you can upload it to timesketch in order to have timeline analysis that will help you see the full picture of the attack .
![APT-Hunter Excel Output](Samples/Sample_Report.xlsx) : this excel sheet will include all the events detected from every windows logs provided to APT-Hunter.
![APT-Hunter Success and Failed logon Report ](Samples/Sample_Logon_Events.csv) : ALl logon events with parsed fields (Date, User , Source IP , Logon Process , Workstation Name , Logon Type , Device Name , Original Log ) as columns.
![APT-Hunter Process Execution Report ](Samples/Sample_Process_Execution_Events.csv) : all process execution captured from the event logs.
![APT-Hunter Object Access Report ](Samples/Sample_Object_Access_Events.csv) : all object access captured from Event (4663) .
![APT-Hunter Collected SID Report ](Samples/Sample_Collected-SIDS.csv) : Collected Users with their SID list to help you in the investigation.
![APT-Hunter EventID Frequency Report ](Samples/EventID_Frequency_Analysis.xls) : EventID frequency analysis report.
# Credits :
I would like to thank [Joe Maccry](https://www.linkedin.com/in/joemccray/) for his amazing contribution in Sysmon use cases ( more than 100 use cases added by Joe )

@@ -1,13 +1,13 @@
Banner="""
/$$$$$$ /$$$$$$$ /$$$$$$$$ /$$ /$$ /$$
/$$__ $$| $$__ $$|__ $$__/ | $$ | $$ | $$
| $$ \ $$| $$ \ $$ | $$ | $$ | $$ /$$ /$$ /$$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$
| $$$$$$$$| $$$$$$$/ | $$ /$$$$$$| $$$$$$$$| $$ | $$| $$__ $$|_ $$_/ /$$__ $$ /$$__ $$
| $$__ $$| $$____/ | $$ |______/| $$__ $$| $$ | $$| $$ \ $$ | $$ | $$$$$$$$| $$ \__/
| $$ | $$| $$ | $$ | $$ | $$| $$ | $$| $$ | $$ | $$ /$$| $$_____/| $$
| $$ | $$| $$ | $$ | $$ | $$| $$$$$$/| $$ | $$ | $$$$/| $$$$$$$| $$
|__/ |__/|__/ |__/ |__/ |__/ \______/ |__/ |__/ \___/ \_______/|__/
By : Ahmed Khlief , @ahmed_khlief
Version : 3.3
"""
Banner="""
/$$$$$$ /$$$$$$$ /$$$$$$$$ /$$ /$$ /$$
/$$__ $$| $$__ $$|__ $$__/ | $$ | $$ | $$
| $$ \ $$| $$ \ $$ | $$ | $$ | $$ /$$ /$$ /$$$$$$$ /$$$$$$ /$$$$$$ /$$$$$$
| $$$$$$$$| $$$$$$$/ | $$ /$$$$$$| $$$$$$$$| $$ | $$| $$__ $$|_ $$_/ /$$__ $$ /$$__ $$
| $$__ $$| $$____/ | $$ |______/| $$__ $$| $$ | $$| $$ \ $$ | $$ | $$$$$$$$| $$ \__/
| $$ | $$| $$ | $$ | $$ | $$| $$ | $$| $$ | $$ | $$ /$$| $$_____/| $$
| $$ | $$| $$ | $$ | $$ | $$| $$$$$$/| $$ | $$ | $$$$/| $$$$$$$| $$
|__/ |__/|__/ |__/ |__/ |__/ \______/ |__/ |__/ \___/ \_______/|__/
By : Ahmed Khlief , @ahmed_khlief
Version : 3.3
"""

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

@@ -1,72 +1,72 @@
import csv
import re
from netaddr import *
import xml.etree.ElementTree as ET
import pandas as pd
from datetime import datetime , timezone
from evtx import PyEvtxParser
from dateutil.parser import parse
from dateutil.parser import isoparse
from pytz import timezone
minlength=1000
Hunting_events=[{'Date and Time':[],'timestamp':[],'Channel':[],'Computer':[],'Event ID':[],'Original Event Log':[]}]
EventID_rex = re.compile(r'<EventID.*>(.*)</EventID>', re.IGNORECASE)
Channel_rex = re.compile(r'<Channel.*>(.*)</Channel>', re.IGNORECASE)
Computer_rex = re.compile(r'<Computer.*>(.*)</Computer>', re.IGNORECASE)
def Evtx_hunt(files,str_regexes,eid,input_timzone,output,timestart,timeend):
for file in files:
file=str(file)
print("Analyzing "+file)
try:
parser = PyEvtxParser(file)
except:
print("Issue analyzing "+file +"\nplease check if its not corrupted")
continue
try:
for record in parser.records():
EventID = EventID_rex.findall(record['data'])
if timestart is not None and timeend is not None:
timestamp = datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat()))
                    if not (timestamp > timestart and timestamp < timeend):
                        continue  # skip records outside the requested time window instead of aborting the hunt
if len(EventID) > 0:
if eid is not None and EventID[0]!=eid:
continue
Computer = Computer_rex.findall(record['data'])
Channel = Channel_rex.findall(record['data'])
if len(Channel)>0:
channel=Channel[0]
else:
channel=" "
#print(record['data'])
# if record['data'].lower().find(str_regex.lower())>-1:
#print(str_regexes)
for str_regex in str_regexes:
rex=re.compile(str_regex, re.IGNORECASE)
#print(rex)
#print(rex.findall(record['data']))
if rex.findall(record['data']):
#print("EventID : "+EventID[0]+" , Data : "+record['data'])
Hunting_events[0]['timestamp'].append(datetime.timestamp(isoparse(parse(record["timestamp"]).astimezone(input_timzone).isoformat())))
Hunting_events[0]['Date and Time'].append(parse(record["timestamp"]).astimezone(input_timzone).isoformat())
Hunting_events[0]['Channel'].append(channel)
Hunting_events[0]['Event ID'].append(EventID[0])
Hunting_events[0]['Computer'].append(Computer[0])
Hunting_events[0]['Original Event Log'].append(str(record['data']).replace("\r", " ").replace("\n", " "))
except Exception as e:
print("issue searching log : "+record['data']+"\n Error : "+print(e))
hunt_report(output)
def hunt_report(output):
global Hunting_events
Events = pd.DataFrame(Hunting_events[0])
print("Found "+str(len(Hunting_events[0]["timestamp"]))+" Events")
Events.to_csv(output+"_hunting.csv", index=False)
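A minimal usage sketch for the hunting module above, assuming the file is importable as a module (the name `hunter`, the log folder, the regex list, and the output prefix are all illustrative):

```python
from pathlib import Path

from pytz import timezone

import hunter  # hypothetical module name for the file shown above

evtx_files = [str(p) for p in Path("/opt/wineventlogs/").glob("*.evtx")]  # example EVTX folder
regexes = ["psexec", "mimikatz"]                                          # example hunt patterns

# eid=None matches every Event ID; timestart/timeend=None disables the time-window filter.
hunter.Evtx_hunt(evtx_files, regexes, eid=None, input_timzone=timezone("UTC"),
                 output="Project2", timestart=None, timeend=None)
# Matches are written to Project2_hunting.csv by hunt_report().
```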

@@ -1,321 +1,321 @@
import json
import sqlite3
import tempfile
import os
import time
import pandas as pd
import geoip2.database
import requests
from dateutil import parser, tz
import csv
from pathlib import Path
start_time=0
end_time=0
password_spray_query = '''
WITH FailedLogins AS (
SELECT
UserId,
ClientIP,
datetime(CreationTime) AS LoginDate
FROM
events
WHERE
Operation = 'UserLoginFailed'
)
SELECT
UserId,
GROUP_CONCAT(ClientIP, ', ') AS ClientIPs,
COUNT(DISTINCT ClientIP) AS UniqueIPCount,
COUNT(*) AS FailedLoginAttempts,
LoginDate
FROM
FailedLogins
GROUP BY
UserId,
strftime('%Y-%m-%d %H', LoginDate)
HAVING
COUNT(*) > 5 AND UniqueIPCount > 3
ORDER BY
FailedLoginAttempts DESC;
'''
user_logon_query = '''
SELECT
UserId,
date(CreationTime) AS LoginDate,
COUNT(*) AS TotalLoginAttempts,
SUM(CASE WHEN Operation = 'UserLoggedIn' THEN 1 ELSE 0 END) AS SuccessfulLogins,
SUM(CASE WHEN Operation = 'UserLoginFailed' THEN 1 ELSE 0 END) AS FailedLogins
FROM
events
where
Operation = 'UserLoggedIn' OR Operation = 'UserLoginFailed'
GROUP BY
UserId,
LoginDate
ORDER BY
LoginDate,
UserId;
'''
User_operations_query = '''
SELECT
UserId,
COUNT(DISTINCT Operation) AS OperationCount,
GROUP_CONCAT(Operation, ', ') AS UniqueOperations
FROM
(SELECT DISTINCT UserId, Operation FROM events)
GROUP BY
UserId
ORDER BY
OperationCount DESC;
'''
user_operation_by_day_query = '''
SELECT
UserId,
DATE(CreationTime) AS OperationDate,
COUNT(DISTINCT Operation) AS OperationCount,
GROUP_CONCAT( Operation, ', ') AS UniqueOperations
FROM
events
GROUP BY
UserId,
OperationDate
ORDER BY
OperationCount DESC
'''
def convert_csv(input_file,temp):
with open(input_file, 'r', encoding='utf-8') as csv_file:
# Create a CSV reader
reader = csv.DictReader(csv_file)
json_file = 'audit_data.json'
json_file=os.path.join(temp, json_file)
with open(json_file, 'w', encoding='utf-8') as jsonl_file:
# Extract and write the AuditData column to a file as JSON Lines
for row in reader:
# Extract the AuditData which is already a JSON formatted string
json_data = json.loads(row['AuditData'])
# Convert the JSON object back to a string to store in the file
json_string = json.dumps(json_data)
# Write the JSON string to the file with a newline
jsonl_file.write(json_string + '\n')
return json_file
def flatten_json_file(input_file, timezone, chunk_size=10000):
# Read the JSON file in chunks
chunks = []
with open(input_file, 'r') as file:
lines = file.readlines()
for i in range(0, len(lines), chunk_size):
chunk = [json.loads(line) for line in lines[i:i + chunk_size]]
# Convert the CreationTime to the desired timezone
for record in chunk:
if 'CreationTime' in record:
# Parse the CreationTime
creation_time = parser.parse(record['CreationTime'])
# Check if the datetime object is timezone aware
if creation_time.tzinfo is None:
# Assume the original time is in UTC if no timezone info is present
creation_time = creation_time.replace(tzinfo=tz.tzutc())
# Convert the CreationTime to the desired timezone
record['CreationTime'] = creation_time.astimezone(timezone).isoformat()
chunks.append(pd.json_normalize(chunk))
# Concatenate all chunks into a single DataFrame
flattened_records = pd.concat(chunks, ignore_index=True)
return flattened_records
def create_sqlite_db_from_dataframe(dataframe, db_name):
conn = sqlite3.connect(db_name)
# Convert all columns to string
dataframe = dataframe.astype(str)
# Write the DataFrame to SQLite, treating all fields as text
dataframe.to_sql('events', conn, if_exists='replace', index=False,
dtype={col_name: 'TEXT' for col_name in dataframe.columns})
conn.close()
def read_detection_rules(rule_file):
with open(rule_file, 'r') as file:
rules = json.load(file)
return rules
def apply_detection_logic_sqlite(db_name, rules):
conn = sqlite3.connect(db_name)
all_detected_events = []
for rule in rules:
rule_name = rule['name']
severity = rule['severity']
query = rule['query']
detected_events = pd.read_sql_query(query, conn)
detected_events['RuleName'] = rule_name
detected_events['Severity'] = severity
all_detected_events.append(detected_events)
conn.close()
if all_detected_events:
result = pd.concat(all_detected_events, ignore_index=True)
else:
result = pd.DataFrame()
return result
def download_geolite_db(geolite_db_path):
url = "https://git.io/GeoLite2-Country.mmdb"
print(f"Downloading GeoLite2 database from {url}...")
response = requests.get(url)
response.raise_for_status() # Check if the download was successful
with open(geolite_db_path, 'wb') as file:
file.write(response.content)
print(f"GeoLite2 database downloaded and saved to {geolite_db_path}")
def get_country_from_ip(ip, reader):
try:
response = reader.country(ip)
return response.country.name
except Exception as e:
#print(f"Could not resolve IP {ip}: {e}")
return 'Unknown'
def analyzeoff365(auditfile, rule_file, output, timezone, include_flattened_data=False,
geolite_db_path='GeoLite2-Country.mmdb'):
start_time = time.time()
temp_dir = ".temp"
if output is None or output == "":
output = os.path.splitext(auditfile)[0]
try:
# Create necessary directories
os.makedirs(output, exist_ok=True)
os.makedirs(temp_dir, exist_ok=True)
# Check if the GeoLite2 database exists, and download it if not
if not os.path.exists(geolite_db_path):
download_geolite_db(geolite_db_path)
# Convert CSV to JSON (assuming convert_csv is a valid function that you have)
json_file = convert_csv(auditfile, temp_dir)
# Input and output file paths
input_file = json_file
db_name = os.path.join(temp_dir, 'audit_data.db')
if rule_file is None:
rule_file = 'O365_detection_rules.json'
output_file = f"{output}_o365_report.xlsx"
# Measure the start time
# Flatten the JSON file
flattened_df = flatten_json_file(input_file, timezone)
# Create SQLite database from the flattened DataFrame
create_sqlite_db_from_dataframe(flattened_df, db_name)
# Open the GeoLite2 database
with geoip2.database.Reader(geolite_db_path) as reader:
# Resolve ClientIP to country names
if 'ClientIP' in flattened_df.columns:
flattened_df['Country'] = flattened_df['ClientIP'].apply(lambda ip: get_country_from_ip(ip, reader))
# Read detection rules
rules = read_detection_rules(rule_file)
# Apply detection logic using SQLite
detected_events = apply_detection_logic_sqlite(db_name, rules)
# Reorder columns to make RuleName the first column
if not detected_events.empty:
columns = ['RuleName', 'Severity'] + [col for col in detected_events.columns if
col not in ['RuleName', 'Severity']]
detected_events = detected_events[columns]
# Perform the brute-force detection query
conn = sqlite3.connect(db_name)
try:
user_login_tracker_df = pd.read_sql_query(user_logon_query, conn)
password_spray_df = pd.read_sql_query(password_spray_query, conn)
user_operations_df = pd.read_sql_query(User_operations_query, conn)
user_operation_by_day_df = pd.read_sql_query(user_operation_by_day_query, conn)
finally:
conn.close()
# Create a new workbook with the detection results
with pd.ExcelWriter(output_file, engine='xlsxwriter') as writer:
if include_flattened_data:
# Split the flattened data into multiple sheets if needed
max_rows_per_sheet = 65000
num_sheets = len(flattened_df) // max_rows_per_sheet + 1
for i in range(num_sheets):
start_row = i * max_rows_per_sheet
end_row = (i + 1) * max_rows_per_sheet
sheet_name = f'Flattened Data {i + 1}'
flattened_df.iloc[start_row:end_row].to_excel(writer, sheet_name=sheet_name, index=False)
# Write statistics for various fields
detected_events.to_excel(writer, sheet_name='Detection Results', index=False)
user_login_tracker_df.to_excel(writer, sheet_name='User Login Tracker', index=False)
password_spray_df.to_excel(writer, sheet_name='Password Spray Attacks', index=False)
user_operations_df.to_excel(writer, sheet_name='User Operations', index=False)
user_operation_by_day_df.to_excel(writer, sheet_name='User Operations by Day', index=False)
flattened_df['Operation'].value_counts().to_frame().to_excel(writer, sheet_name='Operation Stats')
flattened_df['ClientIP'].value_counts().to_frame().to_excel(writer, sheet_name='ClientIP Stats')
flattened_df['Country'].value_counts().to_frame().to_excel(writer, sheet_name='Country Stats')
flattened_df['UserAgent'].value_counts().to_frame().to_excel(writer, sheet_name='UserAgent Stats')
flattened_df['UserId'].value_counts().to_frame().to_excel(writer, sheet_name='UserId Stats')
flattened_df['AuthenticationType'].value_counts().to_frame().to_excel(writer,
sheet_name='AuthenticationType Stats')
# Measure the end time
end_time = time.time()
print(f"Office365 analysis finished in time: {end_time - start_time:.2f} seconds")
except Exception as e:
print(f"An error occurred during the analysis: {e}")
finally:
#Clean up the temporary directory
if os.path.exists(temp_dir):
for file in Path(temp_dir).glob('*'):
file.unlink() # Delete the file
os.rmdir(temp_dir) # Remove the directory
# Write the User Login Tracker results to a new sheet
# Measure the end time
end_time = time.time()
# Calculate and print the running time
running_time = end_time - start_time
print(f"Office365 hunter finished in time: {running_time:.2f} seconds")

File diff suppressed because one or more lines are too long

Binary image file, 9.9 KiB (before and after).

@@ -1,11 +1,11 @@
evtx
netaddr
numpy
pandas
python-dateutil
pytz
six
XlsxWriter
flatten_json
geoip2
requests

File diff suppressed because one or more lines are too long

@@ -1,19 +1,19 @@
User,SID
01566S-WIN16-IR$,S-1-5-18
ANONYMOUS LOGON,S-1-5-7
IEUser,S-1-5-21-3461203602-4096304019-2269080069-1000
Administrator,S-1-5-21-308926384-506822093-3341789130-500
samir,S-1-5-21-308926384-506822093-3341789130-220106
02694W-WIN10$,S-1-5-21-308926384-506822093-3341789130-84104
Administrator,S-1-5-21-81107902-1099128984-1836738286-500
EXCHANGE$,S-1-5-21-2895268558-4179327395-2773671012-1108
IEUser,S-1-5-21-3583694148-1414552638-2922671848-1000
lgrove,S-1-5-21-308926384-506822093-3341789130-101606
a-jbrown,S-1-5-21-308926384-506822093-3341789130-1106
user01,S-1-5-21-1587066498-1489273250-1035260531-1106
Administrator,S-1-5-21-1587066498-1489273250-1035260531-500
Administrator,S-1-5-21-1587066498-1489273250-1035260531-500
sshd_server,S-1-5-21-3583694148-1414552638-2922671848-1002
LOCAL SERVICE,S-1-5-19
NETWORK SERVICE,S-1-5-20
admin01,S-1-5-21-1587066498-1489273250-1035260531-1108

File diff suppressed because one or more lines are too long

Binary image files (before and after): 10 KiB, 236 KiB, 86 KiB, 70 KiB, 222 KiB.

@@ -1,101 +1,101 @@
try{
New-Item -ItemType "directory" -Path "wineventlog"
}
catch
{
echo "can't create a new directory"
}
try{
get-eventlog -log Security | export-csv wineventlog/Security.csv
}
catch
{
echo "Can't retrieve Security Logs"
}
try
{
Get-WinEvent -LogName System | export-csv wineventlog/System.csv
}
catch
{
echo "Can't retrieve System Logs"
}
try{
Get-WinEvent -LogName Application | export-csv wineventlog/Application.csv
}
catch
{
echo "Can't retrieve Application Logs"
}
try{
Get-WinEvent -LogName "Windows PowerShell" | export-csv wineventlog/Windows_PowerShell.csv
}
catch
{
echo "Can't retrieve Windows PowerShell Logs"
}
try{
Get-WinEvent -LogName "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" | export-csv wineventlog/LocalSessionManager.csv
}
catch
{
echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
}
try{
Get-WinEvent -LogName "Microsoft-Windows-Windows Defender/Operational" | export-csv wineventlog/Windows_Defender.csv
}
catch
{
echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
}
try{
Get-WinEvent -LogName Microsoft-Windows-TaskScheduler/Operational | export-csv wineventlog/TaskScheduler.csv
}
catch
{
echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
}
try{
Get-WinEvent -LogName Microsoft-Windows-WinRM/Operational | export-csv wineventlog/WinRM.csv
}
catch
{
echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
}
try{
Get-WinEvent -LogName Microsoft-Windows-Sysmon/Operational | export-csv wineventlog/Sysmon.csv
}
catch
{
echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
}
try{
Get-WinEvent -LogName Microsoft-Windows-PowerShell/Operational | export-csv wineventlog/Powershell_Operational.csv
}
catch
{
echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
}
try
{
Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
}
catch
{
echo "couldn't compress the the log folder "
}

@@ -1,101 +1,101 @@
try{
New-Item -ItemType "directory" -Path "wineventlog"
}
catch
{
echo "can't create a new directory"
}
try{
wevtutil epl Security wineventlog/Security.evtx
}
catch
{
echo "Can't retrieve Security Logs"
}
try
{
wevtutil epl System wineventlog/System.evtx
}
catch
{
echo "Can't retrieve System Logs"
}
try{
wevtutil epl Application wineventlog/Application.evtx
}
catch
{
echo "Can't retrieve Application Logs"
}
try{
wevtutil epl "Windows PowerShell" wineventlog/Windows_PowerShell.evtx
}
catch
{
echo "Can't retrieve Windows PowerShell Logs"
}
try{
wevtutil epl "Microsoft-Windows-TerminalServices-LocalSessionManager/Operational" wineventlog/LocalSessionManager.evtx
}
catch
{
echo "Can't retrieve Microsoft-Windows-TerminalServices-LocalSessionManager/Operational Logs"
}
try{
wevtutil epl "Microsoft-Windows-Windows Defender/Operational" wineventlog/Windows_Defender.evtx
}
catch
{
echo "Can't retrieve Microsoft-Windows-Windows Defender/Operational Logs"
}
try{
wevtutil epl Microsoft-Windows-TaskScheduler/Operational wineventlog/TaskScheduler.evtx
}
catch
{
echo "Can't retrieve Microsoft-Windows-TaskScheduler/Operational Logs"
}
try{
wevtutil epl Microsoft-Windows-WinRM/Operational wineventlog/WinRM.evtx
}
catch
{
echo "Can't retrieve Microsoft-Windows-WinRM/Operational Logs"
}
try{
wevtutil epl Microsoft-Windows-Sysmon/Operational wineventlog/Sysmon.evtx
}
catch
{
echo "Can't retrieve Microsoft-Windows-Sysmon/Operational Logs"
}
try{
wevtutil epl Microsoft-Windows-PowerShell/Operational wineventlog/Powershell_Operational.evtx
}
catch
{
echo "Can't retrieve Microsoft-Windows-PowerShell/Operational Logs"
}
try
{
Compress-Archive -Path wineventlog -DestinationPath ./logs.zip
}
catch
{
echo "couldn't compress the the log folder "
}