Thursday, December 20, 2018

CertAuto

For a developer environment I had a few certificates that needed automatic renewal at certain intervals. The script below uses the PSPKI module to check if certificates on IIS sites are expiring in X days and, if so, renews them against a local certification authority.
The PSPKI module is really good for sending and approving certification requests via PowerShell, where Microsoft's own tools leave room for improvement.

This is really only for test environments and offline environments.
The folder structure needs to be created to look like below




Certini contains the template for the certificate like below for example
 [Version]
Signature = "$Windows NT$"
[NewRequest]
Subject = "C=US,S=CA,L=OHIO,O=Fabrikam,OU=IT,CN=ajax.aspnetcdn.com"
Exportable = TRUE
KeyLength = 4096
KeySpec = 1
KeyUsage = 0xa0
MachineKeySet = True
ProviderName = "Microsoft RSA SChannel Cryptographic Provider"
ProviderType = 12
Silent = True
SMIME = False
RequestType = PKCS10
FriendlyName = "ajax.aspnetcdn.com"

# CertAuto entry point: renew expiring IIS certificates against a local CA
# using the PSPKI module, then rebind the sites and clean up old material.
# NOTE(review): the helper functions called below are defined further down in
# this file; a .ps1 executes top-to-bottom, so confirm they are actually
# provided by certAutoTools.psm1 (or moved above this point) before running.
$scriptPath = $(split-path -Parent $myinvocation.MyCommand.Definition)
Import-Module "$scriptPath\certAutoTools.psm1" -Force
import-module "$($scriptPath)\PSPKI-v3.4.1.0\PSPKI.psm1"
#settings
$logFile = "$($scriptPath)\lastRun.txt" #transcript of the last run
$destinationPath = "$($scriptPath)\generatedCertReq\" #output for generated request .txt files (trailing backslash is relied on when paths are concatenated)
$sourcePath = "$($scriptPath)\certIni" #where Ini-files are stored
$certPath = "$($scriptPath)\generatedCerts" #location for .cer output
$iisSites = "IISSite1","IISSite2" #sites whose https bindings are inspected
$rootCertAuth = "myRootCertAuthority" #CA name passed to PSPKI
#settings end
Start-Transcript -Path $logFile
#check if certrenewal is needed: pick the certificate closest to expiry across all sites
$certificate = get-certOnIIS -IisSites $iisSites |sort-object notafter |select -first 1
$certExpires = get-certExpires -certificate $certificate -days 238
# renewal proceeds when get-certExpires reports $true
if ($certExpires) {
write-output "Found expiring certificates, moving on with renewal"
#create certs on frontends
new-CertRequest -destinationPath $destinationPath -sourcePath $sourcePath
#renew certs on certsrv
submit-certRequest -sourcePath $destinationPath -certAuthName $rootCertAuth -outputPath $certPath
#import certs on frontends
$createdCerts = import-certs -sourcePath $certPath
#import certs to array
$newCerts = get-certsFromFile -certPath $certPath
#get current cert on sites
$iisMappings = get-iisBindingCerts -iisSites $iisSites
#set cert on iis-sites
set-sslBindings -iismappings $iisMappings -newCerts $newcerts
#clean up: archive the .cer files to \old, then delete the loose files and requests
Get-ChildItem -Path $certPath |copy-Item -Destination "$($certPath)\old\" -Force
Get-ChildItem -path $certpath |where-object {$_.psiscontainer -eq $false} |Remove-Item
Get-ChildItem -Path $destinationPath -Filter *.txt |Remove-Item
#remove previous certs from the machine store now that bindings point at the new ones
foreach ($mapping in $iismappings) {
Get-ChildItem -Path Cert:\LocalMachine\my |where-object {$_.Thumbprint -like $mapping.certhash} |remove-item
}
}#end check certexpires
else {
write-output "No certificate needed renewals"
}
Stop-Transcript
function new-CertRequest {
# Generates one certificate request (certreq.exe -new) per .ini template in
# $sourcePath, writing <basename>_<computer>_<timestamp>.txt into
# $destinationPath (caller supplies the trailing backslash).
# Outputs a progress line and the generated request path for each template.
param(
$destinationPath,
$sourcePath
)
$items = Get-ChildItem -Path $sourcePath -Filter *.ini
$date = Get-Date -Format "yyyyMMdd_HHmm"
foreach ($item in $items) {
    write-output "Creating certificate request for $($item.name)"
    # BaseName strips the .ini extension safely; the original TrimEnd('.ini')
    # removed a *character set* {'.','i','n'} and could eat trailing
    # i/n characters from the file name.
    $requestFile = "$destinationPath$($item.BaseName)_$($env:COMPUTERNAME)_$date.txt"
    certreq.exe -new $item.FullName $requestFile
    # (the unused $filesToCopy accumulator from the original was removed)
    write-output $requestFile
}
}
function submit-certRequest {
# Submits the request files in $sourcePath to the named CA via PSPKI,
# approves each pending request, downloads the issued certificate into
# $outputPath and renames the file after part of its subject.
param(
$sourcePath,
$certAuthName,
$outputPath,
$latest = 4
)
# NOTE(review): Sort-Object lastwritetime is ascending, so -First $latest
# selects the OLDEST request files; for the newest, -Descending would be
# needed - confirm intent (the main flow submits freshly generated files).
$certReqs = Get-ChildItem -Path $sourcePath -Filter "*.txt" |Sort-Object lastwritetime |select -First $latest
$ca = Get-CertificationAuthority -Name $certAuthName
$ids = @()
foreach ($certReq in $certReqs) {
$request = Submit-CertificateRequest -Path $certReq.FullName -CertificationAuthority $ca
$ids += $request.RequestID
}
# NOTE(review): the result of this call is discarded - it looks like a
# warm-up/sanity probe of the pending queue; confirm whether it can be removed.
Get-PendingRequest -CertificationAuthority $ca |select -first 1
foreach ($id in $ids) {
Get-PendingRequest -CertificationAuthority $ca -RequestID $id |Approve-CertificateRequest
$mycert = Get-IssuedRequest -CertificationAuthority $ca -RequestID $id |Receive-Certificate -Path $outputPath -Force
$File = Get-ChildItem -Path "$($outputPath)\RequestID_$id.cer"
# second-to-last dot-separated token of the subject DN, e.g. "aspnetcdn" for
# CN=ajax.aspnetcdn.com - assumes a dotted CN; verify for other subjects.
# NOTE(review): Rename-Item throws if a file named $fileName already exists.
$fileName = "cert_$($mycert.Subject.split(".")[-2]).cer"
Rename-Item -Path $file.FullName -NewName $fileName
}
}
function import-certs {
# Imports every .cer file in $sourcePath into the machine certificate store
# via certreq.exe -accept -machine. Emits one progress line per file, or a
# "No files found" message when the folder holds no certificates.
param(
$sourcePath
)
# Wrapping in @() makes .Count reliable for 0/1/many results; the original
# "$items.count -ne $null -and $items.count -gt 0" check was redundant.
$items = @(Get-ChildItem -Path $sourcePath -Filter *.cer)
if ($items.Count -gt 0) {
    foreach ($item in $items) {
        # FullName is the direct path (the original went via VersionInfo.FileName)
        write-output "importing $($item.FullName)..."
        certreq.exe -accept -machine $item.FullName
    }
}
else { write-output "No files found in $sourcepath"}
}
function get-certOnIIS {
# Resolve the certificate objects currently bound over https on each of the
# named IIS sites, by matching binding certificate hashes against the cert:
# drive. Returns the collected certificates as an array.
param(
$IisSites
)
$certCollection = @()
foreach ($IisSite in $IisSites) {
    $httpsOnly = Get-WebBinding -Name $IisSite | Where-Object { $_.protocol -like "https" }
    foreach ($binding in $httpsOnly) {
        write-output "Adding $($binding.certificatehash) on $IisSite"
        $matchingCerts = Get-ChildItem -Path cert:\ -Recurse |
            Where-Object { $_.Thumbprint -like $binding.certificateHash }
        $certCollection += $matchingCerts
    }
}
return $certCollection
}
function get-certExpires {
# Returns $true when the certificate expires within the next $days days
# (i.e. renewal is needed), otherwise $false.
#
# Bug fix: the original returned $false for a certificate inside the renewal
# window and $true for one with plenty of time left - inverted relative to
# the caller, which renews when this function reports $true ("Found expiring
# certificates"). Also fixes the $timspan typo and types the parameter as
# X509Certificate2, which is the class that actually exposes NotAfter (and is
# what the cert: provider returns).
param(
[System.Security.Cryptography.X509Certificates.X509Certificate2]$certificate,
[int]$days
)
$timespan = New-TimeSpan -Start (Get-Date) -End $certificate.NotAfter
if ($timespan.Days -le $days) {
    # within the window (or already expired) -> renewal required
    return $true
}
else {
    return $false
}
}
function get-iisBindingCerts {
# For every https binding on the given sites, build a record holding the
# site name, binding information, certificate hash and details of the
# certificate currently installed in LocalMachine\my for that hash.
param(
$iisSites
)
$mappings = @()
foreach ($websiteName in $iisSites) {
    $siteBindings = Get-WebBinding -Name $websiteName -Protocol https
    foreach ($siteBinding in $siteBindings) {
        $installedCert = Get-ChildItem -Path Cert:\LocalMachine\my |
            Where-Object { $_.Thumbprint -eq $siteBinding.certificateHash }
        $record = New-Object psobject -Property @{
            iis                = $websiteName
            bindingInformation = $siteBinding.bindingInformation
            certhash           = $siteBinding.certificateHash
            friendlyName       = $installedCert.FriendlyName
            dnsName            = $installedCert.DnsNameList.unicode
            subject            = $installedCert.Subject
        }
        $mappings += $record
    }
}
return $mappings
}
function get-certsFromFile {
# Reads each .cer file in $certPath and returns one record per certificate
# with its thumbprint and subject, for matching against IIS bindings.
param(
$certPath
)
$items = Get-ChildItem -Path $certpath -Filter *.cer
$collection = @()
foreach ($item in $items) {
    if ($item -ne $null) {
        # X509Certificate2 loads the file via its constructor and exposes
        # Thumbprint (same SHA-1 hex string GetCertHashString() produced);
        # the base X509Certificate class and its Import() method are obsolete.
        $crt = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2 ($item.FullName)
        $collection += New-Object psobject -Property @{ thumbprint = $crt.Thumbprint; subject = $crt.Subject }
    }
}
return $collection
}
function set-sslBindings {
# Rebinds each IIS https binding to the freshly issued certificate whose
# subject matches the one currently on the binding.
# $iisMappings - records from get-iisBindingCerts (iis/bindingInformation/subject)
# $newCerts    - records from get-certsFromFile (thumbprint/subject)
param(
$iisMappings,
$newCerts
)
write-output "has $($iisMappings.count) iismappings and $($newcerts.count) newcerts"
foreach ($iisMapping in $iisMappings) {
    $httpsBinding = get-webBinding -name $iisMapping.iis -protocol https |
        where-object { $_.bindingInformation -like $iisMapping.bindingInformation }
    if ($httpsBinding -ne $null) {
        $bindCert = $newCerts | where-object { $_.subject -like $iisMapping.subject } | select -first 1
        if ($bindCert -ne $null) {
            write-output "adding $($bindCert.thumbprint) to $($iismapping.bindingInformation)"
            $httpsBinding.AddSslCertificate($bindCert.Thumbprint,'my')
            write-output "binding $($httpsBinding.bindingInformation) to $($bindCert.subject)"
        }
        else {
            # Bug fix: the original interpolated $bindCert.subject here, which
            # is always $null in this branch; report the subject we searched for.
            write-output "Couldn't bind $($httpsBinding.bindingInformation) to $($iisMapping.subject)"
        }
    }
    # Bug fix: the original also called $httpsBinding.getttype() (typo) on a
    # null value here, which throws instead of logging the message.
    else { write-output "webbinding empty $($iismapping.iis)" }
}
}


References:
https://www.sysadmins.lv/projects/pspki/default.aspx  - PSPKI Module

Monday, June 04, 2018

Export IIS bindings

I find the built-in cmdlets for exporting IIS bindings a bit lacking, so I built my own export function for fun and profit.
The function in the toolsfile collects all info and return a populated object that can be used to export list to file or do something else with.
# Export the host-header bindings of two IIS sites to CSV files next to the script.
$scriptPath = $(split-path -Parent $MyInvocation.MyCommand.Definition)
import-module "$($scriptPath)\xssWebTools.psm1" -Force
$Site1Headers = get-SiteBindings -siteName "site1"
$Site2Headers = get-SiteBindings -siteName "site2"
if ($null -ne $Site1Headers) {
    $Site1Headers | export-csv -Path "$($scriptPath)\site1Addresses.csv" -Encoding UTF8 -NoTypeInformation
    write-output "Printed Site1Headers to $($scriptPath)"
}
else { write-output "No Site1Headers found" }
if ($null -ne $Site2Headers) {
    $Site2Headers | export-csv -Path "$($scriptPath)\site2Addresses.csv" -Encoding UTF8 -NoTypeInformation
    write-output "Printed Site2Headers to $($scriptPath)"
}
else { write-output "No Site2Headers found" }
function get-SiteBindings {
# Returns one psobject (hostheader/protocol/port) per binding of the named
# IIS site, or $null with a warning when the name matches zero or several sites.
param(
[parameter(mandatory=$true)]
[ValidateNotNullOrEmpty()]
$siteName
)
if (!(Get-Module -Name webadministration)) { import-module webadministration }
$selectedSite = Get-ChildItem -Path IIS:\Sites | where-object { $_.name -like $siteName }
# guard clauses: ambiguous or missing matches bail out early
if ($selectedSite.count -gt 1) {
    write-warning "Too many hits. Skipping..."
    return $null
}
if ($selectedSite.count -eq 0) {
    write-warning "Couldn't find any sites matching '$siteName'!"
    return $null
}
$bindingEntries = Get-ItemProperty -Path $selectedSite.pspath -Name bindings.collection
$results = @()
foreach ($entry in $bindingEntries) {
    # bindingInformation has the shape "ip:port:hostheader"
    $parts = $entry.bindingInformation.split(":")
    $results += new-object psobject -Property @{
        hostheader = $parts[-1]
        protocol   = $entry.protocol
        port       = $parts[-2]
    }
}
return $results
}

Thursday, May 17, 2018

Digging through a lot of files

I have a scenario where I need to scan a lot of xmlfiles from elmah-logs in the size of 300 000 files.

Powershell was a fun project for this but couldn't really do the job due to performance issues.
When reading 300 000 files using powershell like example below, the script ran for 368 minutes.
# Scan a folder of elmah XML error logs and export selected fields to CSV.
$scriptPath = $(split-path -Parent $myinvocation.MyCommand.Definition)
$path = "C:\elmahFiles"
$savedObject = "$scriptPath\savedObjectLatestWeeks.csv"
$startTime = get-date
write-output "script started at $startTime"
$counter = 0
$collection = get-childitem -path $path
# List[object] avoids the O(n^2) cost of '+=' re-allocating the whole array
# on every append; with 300 000 files that dominated the original runtime.
$object = New-Object System.Collections.Generic.List[object]
$endtime = get-date
write-output "Found $($collection.count) files. Time before starting foreach $(($endtime-$starttime).totalseconds) seconds"
foreach ($item in $collection) {
    # plain modulus instead of [math]::IEEERemainder for the progress tick
    if ($counter % 1000 -eq 0) { write-output "Iteration $($counter)" }
    [xml]$tempxml = get-content $item.FullName
    # index the serverVariables once instead of five Where-Object scans per file
    $serverVars = @{}
    foreach ($node in $tempxml.error.serverVariables.ChildNodes) {
        $serverVars[$node.name] = $node.value.string
    }
    $tempObj = new-object PSObject -Property @{
        message     = $tempxml.error.message
        Time        = $tempxml.error.time
        Filepath    = $($item.FullName)
        details     = $tempxml.error.detail
        AllHttp     = $serverVars["ALL_HTTP"]
        HttpReferer = $serverVars["HTTP_REFERER"]
        UserAgent   = $serverVars["HTTP_USER_AGENT"]
        ScriptName  = $serverVars["SCRIPT_NAME"]
        Host        = $tempxml.error.host
        origin      = $serverVars["HTTP_X_FORWARDED_FOR"]
    }
    $object.Add($tempObj)
    $counter++
}
$endtime = get-date
write-output "Done collecting in $(($endtime-$starttime).totalseconds) seconds ($(($endtime-$starttime).totalminutes) minutes)"
$object |Export-Csv -Path $savedObject -Encoding UTF8


I tried running parallel jobs and using dotnet to read files instead, but nothing could compete with MS LogParser.
So enter Log Parser Studio.
This neat tool managed to comb through 300 000 files in 47 minutes instead!
It is a bit tricky to formulate the queries, however. Here's an example of getting elmah logs where a variable named HTTP_REFERER contains a key value.

select * FROM '[LOGFILEPATH]' where  string like '%http://www.mycompany.com/subsite%' and name like 'HTTP_REFERER'

So in conclusion for same set of 300 000 files
Powershell took 368 minutes
Log Parser Studio took 47 minutes

References:

Thursday, May 03, 2018

Using hashtables to combine values

For a job I needed a good way of combining a primary value with optional subvalues in an XML file.
Below is an example of how to extract attributes from an XML element into a hashtable and then join two hashtables into one. I couldn't find a method for extracting attributes on the web; folks usually go with subnodes instead of attributes. I'm partial to attributes and had to get creative, but the solution was quite simple. The Attributes property of the element only shows the actual attributes from the XML file. It's not obvious when browsing the object that the name property can be lifted, but here it's used for creating a new hashtable row.



Samplescript for proof of concept
# Proof of concept: merge XML element attributes with default values via hashtables.
$ScriptPath = $(split-path -Parent $myinvocation.MyCommand.Definition)
Import-Module "$($ScriptPath)\Tools.psm1" -force
[xml]$xml = get-content -path "$($scriptPath)\websiteNodes.xml"
$collection = @()
foreach ($node in $xml.SelectNodes("//webSites/webSite")) {
    $propertyDefault = @{url="test";name="test2";else="else"}
    # attribute values win over the defaults on duplicate keys
    $attribHash = convert-xmlAttribToHash -xmlElement $node
    $mergedProps = Join-Hashtable -master $attribHash -child $propertyDefault
    $collection += new-object PSObject -Property $mergedProps
}
$collection[0]
Function Join-Hashtable {
# Merge two hashtables into a new one; on duplicate keys the Master value
# wins. Neither input table is modified.
[cmdletbinding()]
Param (
[hashtable]$Master,
[hashtable]$Child
)
# start from a copy of Master so the caller's table stays untouched,
# then pull in only the Child keys Master doesn't already have
$merged = $Master.Clone()
foreach ($key in $Child.Keys) {
    if (-not $merged.ContainsKey($key)) {
        $merged[$key] = $Child[$key]
    }
}
return $merged
} #end Join-Hashtable
function convert-xmlAttribToHash {
# Flatten an XML element's attributes into a name -> value hashtable.
param(
$xmlElement
)
$result = @{}
$xmlElement.Attributes | ForEach-Object { $result[$_.name] = $_.value }
return $result
}
view raw Tools.psm1 hosted with ❤ by GitHub
<webSites rootUrl="https://portal.mycompany.com/subsites/mastersite" defaultLanguage="1053" useParentTopNav="true" defaultTemplate="STS#0">
<webSite name="MerSupport" url="/support/mersupport" language="1033" />
<webSite name="Support" url="/support" language="1033" />
<webSite name="Support3" url="/support3" />
</webSites>


This eventually led to createwebsFromStructure.ps1 that uses powershell splatting to build commands with the resulting hashtable.

# Create a SharePoint web hierarchy from webSiteNodes.xml, merging each
# element's attributes with root-level defaults and splatting into New-SPWeb.
Add-PSSnapin microsoft.sharepoint.powershell
$scriptPath = $(split-path -Parent $MyInvocation.MyCommand.Definition)
Import-Module "$($scriptPath)\Tools.psm1"
$xmlStructureName = "webSiteNodes.xml"
$xmlStructurePath = Join-Path -Path $scriptPath -ChildPath $xmlStructureName
[xml]$xmlStructure = Get-Content -Path $xmlStructurePath
$root = $xmlStructure.SelectSingleNode("//webSites")
$rootUrl = $xmlStructure.webSites.rootUrl
$allWebSites = $xmlStructure.SelectNodes("//webSites/webSite")
#sort for order of handling first: parents must exist before their children
foreach ($allWebSite in $allWebSites) {
    $depth = $($allWebSite.url.Split("/").count) - 1
    $allWebSite | Add-Member -MemberType NoteProperty "UrlDepth" -Value $depth -Force
}
$rootProperty = @{language=$root.defaultLanguage;useParentTopNav=$root.useParentTopNav;Template=$root.defaultTemplate}
#create websites, shallowest first
foreach ($website in ($allWebSites | sort-object urldepth)) {
    $fullUrl = "$($rootUrl)$($website.url)"
    $checkExists = get-spweb $fullUrl -ErrorAction SilentlyContinue
    if ($checkExists -eq $null) {
        write-output "Creating site at $fullUrl"
        #join element attributes with the root defaults (attributes win)
        $propertyTemp = @{}
        foreach ($attrib in $website.Attributes) {
            $propertyTemp.$($attrib.name) = $attrib.value
        }
        $propertyTemp.url = $propertyTemp.url.Insert(0,$rootUrl)
        $newProperty = Join-Hashtable -master $propertyTemp -Child $rootProperty
        # UseParentTopNav is a switch, not a value parameter, so it cannot stay
        # in the splatted table; remember it, then remove it once (the original
        # duplicated the Remove call in both branches).
        $useTopNav = $newProperty.useParentTopNav -eq 'true'
        $newProperty.remove('UseParentTopNav')
        if ($useTopNav) {
            write-output "Using topparentnav"
            New-SPWeb @newProperty -UseParentTopNav
        }
        else {
            write-output "Not using topparentNav"
            New-SPWeb @newProperty
        }
    }
    else { write-output "Site already exists. Skipping creation..." }
}


References:
https://powershell.org/2013/01/23/join-powershell-hash-tables/ - source for join-hashtable function
https://technet.microsoft.com/en-us/library/gg675931.aspx - source for details on splatting

Friday, January 12, 2018

Powershell and jobs

Powershell jobs are a bit of blunt instrument.

Here's two options for sending inputs for them
Scenario 1: Sending a string with all of our values and splitting them to an array when inside the scriptblock.
# One delimited string goes in as the job argument and is split back into a
# list inside the scriptblock.
$inputString = "text1;text2;text3"
start-job -Name "Test" -ArgumentList $inputString -ScriptBlock {
    $listItems = @($args[0]).split(";")
    write-output "I got $($listItems.count) items with me..."
    foreach ($listItem in $listItems) {
        write-output "Item: $listItem"
    }
}
get-job | where-object { $_.name -like "Test" } | receive-job -AutoRemoveJob -wait


Scenario2: Sending an object as input. Handy for configs and saves the need to define variablenames.
# A single object carries the whole config into the job via -InputObject;
# the scriptblock reads it back from the automatic $input enumerator.
$ScriptPath = $(split-path -Parent $MyInvocation.MyCommand.Definition)
$inputs = new-object PSObject -Property @{url="Test48"; State="Added";scriptPath=$ScriptPath}
start-job -Name "Test" -InputObject $inputs -ScriptBlock {
    $data = @($input)
    write-output "My output: $($data)"
    write-output "url: $($data.url)"
    write-output "state: $($data.state)"
    write-output "scriptpath: $($data.scriptpath)"
}
get-job | where-object { $_.name -like "Test" } | receive-job -AutoRemoveJob -wait

powershell assisting with mails

ExtractAttachedMails.ps1 is used for extracting all attached mails in an Outlook .msg file to a specified folder.
CatalogueMails.ps1 is used for indexing the mails to more easily filter on timestamps and senders.

This came in handy when a happy user sent me 45 attached mails with the same subject line, so the extraction process also throws in an index number to avoid naming conflicts.

# CatalogueMails.ps1: index extracted .msg files (timestamp/topic/recipient)
# and export the list to CSV for easy filtering.
$scriptPath = $(split-path $myinvocation.mycommand.definition)
$outputPath = "$($scriptPath)\extractedMails"
$mails = get-childitem -Filter *.msg -Path $outputPath
$object = @()
# Create the Outlook COM object once; the original instantiated a new
# Outlook.Application per message, which is slow and piles up COM instances.
$outlook = new-object -ComObject Outlook.application
foreach ($mail in $mails) {
    try {
        $msg = $outlook.session.openshareditem($mail.fullname)
        $tempObject = new-object psobject -Property @{body=$msg.body;Topic=$msg.ConversationTopic;to=$msg.to;timestamp=$msg.SentOn }
        $object += $tempObject
    }
    # message typo fixed ("Coulnd't" -> "Couldn't")
    catch { write-output "Couldn't open $($mail.fullname)" }
}
$object |sort-object timestamp -Descending |select timestamp,Topic,to
$object |sort-object timestamp -Descending |select timestamp,Topic,to |export-csv -Path "$($scriptpath)\output.csv" -NoTypeInformation -Encoding UTF8
write-output "Total count: $($mails.count)"
# ExtractAttachedMails.ps1: save every .msg attachment of the .msg files in
# inputMails to extractedMails, prefixing an index to avoid name collisions.
$scriptPath = $(split-path $myinvocation.mycommand.definition)
$outputPath = "$($scriptPath)\extractedMails"
$inputPath = "$($scriptPath)\inputMails"
$mails = get-childitem -Path $inputPath -Filter *.msg
if ($mails -ne $null) {
    $index = 0
    # One Outlook COM instance for the whole run; the original created a new
    # Outlook.Application per mail, which is slow and leaks COM instances.
    $outlook = new-object -ComObject Outlook.application
    foreach ($mail in $mails) {
        $msg = $outlook.CreateItemFromTemplate($mail.FullName)
        foreach ($attachment in $msg.Attachments) {
            if ($attachment.FileName -like "*.msg") {
                # index prefix avoids collisions when subject lines repeat
                $attFn = "$outputPath\$($index)_$($attachment.FileName)"
                $attachment.SaveAsFile($attFn)
                $index++
            }
            else {write-output "Skipping, not a msg ($($attachment.filename))"}
        }
    }
    # message typo fixed ("attachements" -> "attachments")
    write-output "Total count: $($mails.count). Extracted $index attachments"
}
else { write-output "No mails found $inputPath"}

Powershell and Uptimerobot

Uptimerobot can be quite tedious when you need to update many monitors at once. For example say you bought the license for Uptimerobot and n...