commit bbf72f5ec3eae5c7201b83f423135fabdb7e4063
Author: vgallegoiz
Date:   Mon Jul 22 09:16:24 2024 +0200

    Add UC_Validation.ps1

diff --git a/UC_Validation_ps.ps1 b/UC_Validation_ps.ps1
new file mode 100644
index 0000000..cff0195
--- /dev/null
+++ b/UC_Validation_ps.ps1
@@ -0,0 +1,252 @@

# ======================================== QA ===============================================
# $API_TOKEN = "777A7F57-CBAD-4B13-830E-3A8048E1B58B"
# $EVENT_COLLECTOR_URL = "https://inhas71351.eu.boehringer.com:8088/services/collector"
# $LOG_INJECTION_FILE = "C:\Users\carretea\Desktop\DFIR_Team\UC_TestBank\LogsInjection.json"
# =================================================================================================

# ======================================== PROD ===============================================
$API_TOKEN = "28B66E41-FFC9-474F-AF99-7E7A36EEA2DE"
$EVENT_COLLECTOR_URL = "https://inhas71353.eu.boehringer.com:8088/services/collector"
$SERVICENOW_URL = "https://boehringer.service-now.com/api/now/table/incident"
$SERVICENOW_USERNAME = "your_username"
$SERVICENOW_PASSWORD = "your_password"

# $LOG_INJECTION_FILE = "C:\Users\carretea\Desktop\DFIR_Team\UC_TestBank\LogsInjection.json"
# =================================================================================================

<#
Param (
    [Parameter(Mandatory=$true,HelpMessage="Specify date. Format must be dd/MM/yyyy")]
    [Alias("Time")]
    [ValidateScript(
        {
            try{[datetime]::ParseExact($psitem ,"dd/MM/yyyy" ,[System.Globalization.CultureInfo](Get-Culture))}catch{throw "Datetime is in the wrong format ($_) Use this format 29/12/2022"}
        })]
    [String]$date
)
#>

# Next steps:
# 1. Set the time difference for each category
# 2. Collect all commands that work
# 3. Document everything in Confluence



# ======== GLOBAL VARIABLES =========
$TIMEDIFF=5;        # Minutes subtracted for sources whose timestamp must be back-dated
$DATA_PATH="."
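
# ----------------------------------------------------------------------------
# Illustrative note (assumption about the CSV content, which is not part of
# this commit): each row of commands_mod.csv is expected to carry a ready-made
# injection command in its "Command (I)" / "Command (II)" columns, containing
# the placeholders THISISTHETIME, APITOKEN and EVENTCOLLECTORURL that this
# script substitutes before execution, roughly like:
#
#   curl.exe EVENTCOLLECTORURL -H "Authorization: Splunk APITOKEN" -d "{\"event\": \"... THISISTHETIME ...\", \"sourcetype\": \"example:sourcetype\"}"
#
# The real payloads and sourcetypes live in the CSV and are not defined here.
# ----------------------------------------------------------------------------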


# Replaces the THISISTHETIME placeholder in a command with the supplied timestamp
function Subs_Time {

    param(
        [string]$command,
        [string]$time
    );

    $aux=$command -replace "THISISTHETIME", $time

    return $aux

}

# Replaces an arbitrary keyword in a command with the supplied value
function Sub_Word {

    param(
        [string] $keyword,
        [string] $replacement,
        [string] $command
    );

    $aux=$command -replace $keyword, $replacement

    return $aux
}

# Creates a ServiceNow incident via the Table API using basic authentication
function New-ServiceNowIncident{
    param(
        [string]$shortDescription,
        [string]$description
    )

    $body = @{
        short_description = $shortDescription
        description = $description
    } | ConvertTo-Json

    $headers = @{
        Authorization = "Basic " + [Convert]::ToBase64String([Text.Encoding]::ASCII.GetBytes("$($SERVICENOW_USERNAME):$($SERVICENOW_PASSWORD)"))
        "Content-Type" = "application/json"
    }

    Invoke-WebRequest -Uri $SERVICENOW_URL -Method Post -Headers $headers -Body $body
}
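
# ----------------------------------------------------------------------------
# Illustrative note: New-ServiceNowIncident is defined above but is not called
# anywhere in this script yet. A possible use (only a sketch, not part of the
# current flow) would be to open an incident when an injection fails, e.g.:
#
#   New-ServiceNowIncident -shortDescription "UC log injection failed" -description "Use case 'X' did not return a success response from the event collector."
#
# ----------------------------------------------------------------------------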

# ====================================================================
# DATASET GENERATOR
# ====================================================================

$category="";
$time="";
$prev_category="";

$usecases = [ordered]@{}

# Content is read from the CSV file
$csvfile = Import-CSV -Path "$DATA_PATH\commands_mod.csv"

# A backup is created for both the dataset and results.txt
Copy-Item $DATA_PATH"\dataset.json" -Destination $DATA_PATH"\dataset_backup.json"
Copy-Item $DATA_PATH"\results.txt" -Destination $DATA_PATH"\results_backup.txt"

# The last dataset created is deleted
Remove-Item $DATA_PATH"\dataset.json"
Remove-Item $DATA_PATH"\results.txt"

# Each rule title is taken from "commands_mod.csv" and a JSON object is created, which is inserted in "dataset.json"
foreach($row in $csvfile) {

    $category = $row.Category

    if ($category -ne $prev_category){

        # Depending on the log type, the timestamp has to be adjusted.
        Switch ($category)
        {
            "01 - DC/Domain Admin" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=(Get-Date -Format "hh:mm:ss.fff"); $time=$d+"T"+$t; } # $time="0"
            "02 - EDR (MDE)" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=[regex]::match((Get-Date).AddHours(-1.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+"T"+$t+"Z"; } # $time="-60"
            "02 - EDR (MDCA)" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=[regex]::match((Get-Date).AddHours(-1.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+"T"+$t; } # $time="-60"
            "02 - EDR (MDI)" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=[regex]::match((Get-Date).AddHours(-1.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+"T"+$t; } # $time="-60"
            "02 - EDR (MD365)" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=[regex]::match((Get-Date).AddHours(-1.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+"T"+$t; } # $time="-60"
            "02 - EDR (MDO)" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=[regex]::match((Get-Date).AddHours(-1.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+"T"+$t; } # $time="-60"
            "03 - AWS" { $time="" } # Cloudtrail: -60 ;; aws:firehose: Unknown
            "04 - AV" { $time=(Get-Date -Format "yyyy-MM-dd hh:mm:ss") } # $time="0"
            "04 - AV (sophosindex)" { $time=(Get-Date -Format "yyyy-MM-dd hh:mm:ss") } # $time="0"
            "04 - AV (wdindex)" { $time=(Get-Date -Format "yyyy-MM-dd hh:mm:ss") } # $time="0"
            # "05 - SNET" { $time="0" } # $time="0"
            "06 - WAF" { $time=(Get-Date -Format "MMM dd hh:mm:ss") } # $time="0"
            "07 - Firewall" { $time=(Get-Date -Format "MMM dd hh:mm:ss") } # $time="0"
            "07 - Firewall (Flows)" { $time=(Get-Date -Format "MMM dd hh:mm:ss") } # $time="0"
            "07 - Firewall (PA)" { $time=(Get-Date -Format "MMM dd hh:mm:ss") } # $time="0"
            "08 - Zscaler" { $d=(Get-Date -Format "MMM dd"); $t=[regex]::match((Get-Date).AddHours(-1.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+" "+$t; } # $time="-60"
            "08 - Zscaler (Sandbox)" { $aux=(Get-Date -UFormat %s).split('.')[0]; $time=$aux-($TIMEDIFF*60); } # $time="0"
            "09 - Illumio" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=[regex]::match((Get-Date).AddHours(-1.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+"T"+$t+"Z"; } # $time="-60"
            "10 - OT" { $d=(Get-Date -Format "yyyy-MM-dd"); $t=[regex]::match((Get-Date).AddHours(-0.0333),"[0-9]{2}:[0-9]{2}:[0-9]{2}").value; $time=$d+"T"+$t+".000+01:00"; } # $time="0"
            # "11 - Scans" { $time="0" } # $time="0"

            "12 - Others (Aquasec)" { $aux=(Get-Date -UFormat %s).split('.')[0]; $time=$aux-($TIMEDIFF*60); } # $time="0"
            "12 - Others (CAlogs)" { $time="" } # No logs available
            "12 - Others (Mail2Siem)" { $time="" } # Can't inject log
            "12 - Others (McAfeeADS)" { $time="" } # Can't inject log
            "12 - Others (Modsecure)" { $time=(Get-Date -Format "ddd MMM dd hh:mm:ss") } # Can't inject log
            "12 - Others (Netscaler)" { $time=(Get-Date -Format "MMM dd hh:mm:ss") } # $time="0"

            "12 - Others (SAPETD)" { $aux=(Get-Date -UFormat %s).split('.')[0]; $time=$aux-($TIMEDIFF*60); } # $time="0"

        }
    }

    # The corresponding time is inserted in the command
    $c1_aux=""
    $c2_aux=""
    $c1=$row."Command (I)"
    $c2=$row."Command (II)"

    # Write-Host $time

    # Time is included in each command
    if(!([string]::IsNullOrWhitespace($c1))){ $c1_aux = Subs_Time -command $c1 -time $time; } # Write-Output $c1_aux
    if(!([string]::IsNullOrWhitespace($c2))){ $c2_aux = Subs_Time -command $c2 -time $time; }

    # JSON object is created
    $line = $row."UC Title"

    $usecases[$line] = [pscustomobject]@{
        category = $category
        command_1 = $c1_aux
        command_2 = $c2_aux
        time = $time
    }

    # The previous category takes the current category's value
    $prev_category = $category

}

# JSON objects are written into the output file
$usecases | ConvertTo-Json | Add-Content -Path .\dataset.json
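
# ----------------------------------------------------------------------------
# Illustrative note: after the generator above runs, each entry of dataset.json
# should look roughly like the sketch below (field names come from the
# pscustomobject above; the command text itself comes from the CSV, with the
# timestamp already substituted but APITOKEN / EVENTCOLLECTORURL left as
# placeholders for the injector below to replace):
#
#   "Example UC Title": {
#       "category": "02 - EDR (MDE)",
#       "command_1": "curl.exe EVENTCOLLECTORURL -H \"Authorization: Splunk APITOKEN\" -d \"... 2024-07-22T08:11:24Z ...\"",
#       "command_2": "",
#       "time": "2024-07-22T08:11:24Z"
#   }
#
# ----------------------------------------------------------------------------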

# ====================================================================
# LOG INJECTOR (Basic)
# ====================================================================

# Content is read from the JSON file containing the generated JSON objects
$json = Get-Content -Path "$DATA_PATH\dataset.json" | ConvertFrom-Json

Add-Content "$DATA_PATH\results.txt" "TITLE,CATEGORY,STATUS"

# For each use case, its commands are extracted and executed one by one
foreach ($uc in $json.PSObject.Properties)
{
    $index=1
    $title = $uc.Name
    $command = $uc.Value."command_$index"
    $category = $uc.Value."category"

    # The output is reset so a use case without commands does not inherit the previous result
    $commandOutput = ""

    # The UC being injected is written to the console
    Write-Host "Use case `"$title`" is being injected...`n"

    # A small throttle so as not to overload Splunk
    Start-Sleep -Seconds 1

    while(!([string]::IsNullOrWhitespace($command))){

        # API token value is included
        $temp = Sub_Word -keyword "APITOKEN" -replacement $API_TOKEN -command $command

        # Event Collector URL is included
        $cmd_final = Sub_Word -keyword "EVENTCOLLECTORURL" -replacement $EVENT_COLLECTOR_URL -command $temp

        # The curl.exe command is executed
        $commandOutput = Invoke-Expression $cmd_final

        $index=$index+1
        $command = $uc.Value."command_$index"
    }

    # The following lines write to results.txt whether the use case was submitted successfully or not.
    if ($commandOutput -match "{`"text`":`"Success`",`"code`":0}"){
        Add-Content "$DATA_PATH\results.txt" "$title,$category,SUCCESS"
    }
    else {
        Add-Content "$DATA_PATH\results.txt" "$title,$category,FAILED"
    }
}

<#

# ====================================================================
# LOG INJECTOR (Fancy)
# ====================================================================

# Using the following code, the command is not retrieved from the CSV file, but built from the JSON file's content (which is fancier, but has the same outcome)

$json = Get-Content -Path "C:\Users\carretea\Desktop\DFIR_Team\UC_TestBank\LogsInjection.json" | Out-String | ConvertFrom-Json
$counter = 0

# Number of use cases found in the JSON file
$json.UseCases.RuleName.length

foreach ($uc in ($json.UseCases | Get-Member -Type NoteProperty).Name){
    Write-Output "Injecting logs for Use Case $uc..."

    foreach ($BB in $json.UseCases.$uc){
        $payload = $BB.payload
        $sourcetype = $BB.sourcetype
        # Double quotes so $payload and $sourcetype are expanded; the inner quotes stay backslash-escaped for curl.exe
        curl.exe $EVENT_COLLECTOR_URL -H "Authorization: Splunk $API_TOKEN" -d "{\`"event\`": \`"$payload\`", \`"sourcetype\`": \`"$sourcetype\`"}"

        Start-Sleep -Seconds 10
    }
}

#>
\ No newline at end of file