Simple Network Authentication

Friday, July 25, 2014
<#
Purpose:    Validate network credentials, preventing lockouts.
Notes:            This is a pretty simple script, accepting only one parameter ($Creds).
                  Store your credentials in a variable with Get-Credential, and pass it
                  to this function.
Caveat(s):  None that I am aware of.
#>
 
Function ValidateCredentials($Creds){
            # Validates a PSCredential against the domain without risking an
            # account lockout.  Emits $true/$false to the pipeline.
            # Param:  $Creds - a PSCredential (from Get-Credential).
            Add-Type -AssemblyName System.DirectoryServices.AccountManagement
            $Info = $Creds.GetNetworkCredential()
            if($Info.Domain -eq ''){
                  # No domain supplied - fall back to the current logon domain.
                  $Info.Domain = $env:USERDOMAIN
                  }
            $TypeDomain = [System.DirectoryServices.AccountManagement.ContextType]::Domain
            Try{
                  $PC = New-Object System.DirectoryServices.AccountManagement.PrincipalContext $TypeDomain,$Info.Domain
                  # FIX: ValidateCredentials returns a boolean and does NOT throw
                  # on a bad password, so the original Try/Catch never reported
                  # authentication failure.  Inspect the result explicitly.
                  $Valid = $PC.ValidateCredentials($Info.UserName,$Info.Password)
                  if($Valid){
                        Write-Verbose ("Authentication Success")
                        }
                  else{
                        Write-Error ("Authentication Failed")
                        }
                  # Preserve the original pipeline output (the boolean result).
                  $Valid
                  }
            Catch{
                  # Reached only on infrastructure errors (e.g. the domain
                  # controller is unreachable), not on invalid credentials.
                  Write-Error ("Authentication Failed")
                  }
            }
Read more ...

Collecting Data from a Database Dump

Thursday, July 24, 2014
<#
Purpose:    Extract useful information from a database exported as multiple text files
Notes:        A little background:  One of my company's clients had requested a large bit
            of information from one of the State Departments, and they promptly exported a
            7-tabled database as a series of 1.9 Million-line text files.  Seven of them, to
            be exact.  Luckily, we only need data from three of them.  Our options were to 
            attempt to re-assemble the database (fixing errant linefeeds in the process), or 
            to get what we needed and get out.  We chose the latter.  Re-assembling a DB is just
            too unwieldy for a standard user.  We were able to locate the foreign key in each line
            of the text file, and split that out to create what you will find below.
            This is a script that was written with assistance from one of the Geniuses I
            have the pleasure of working with.  I had a functional script, but the memory
            utilization was too high.  It took some time to figure out what logic was necessary,
            but twice as long to get the script stripped down to what you see here.
Caveat(s):    No known issues.  This was built for a very specific function, but you might be able
            to glean a bit of structural information from it.
#>

# Build a hashtable of address records keyed by CompanyID - the foreign key
# shared by all of the exported text files.  Later passes (CorporationName,
# Filing) fill in the remaining fields by that key.
Set-Location "C:\DataDump\"

$RecordHash = @{}
# Record delimiter in the dump: a comma followed by a double quote.
$SplitString = ",`""
# -ReadCount 1000 emits arrays of 1000 lines at a time to keep memory flat.
Get-Content (".\Data\Address.txt") -ReadCount 1000 | ForEach-Object{
    # Intent: re-join records broken by stray linefeeds (a linefeed not
    # preceded by a closing quote), keep only elements matching 'Columbia',
    # then split back into individual records.
    # NOTE(review): Get-Content strips line terminators, so the -replace and
    # the -split on NewLine may be no-ops on per-line elements - confirm
    # against the actual dump before reusing this pattern.
    ($_ -replace '(?ms)([^"])\r\n','$1') -match 'Columbia' -split [environment]::NewLine | ForEach-Object{
        $BusinessRecord = [string]$_ -split $SplitString
        # NOTE(review): assumes the second comma-separated token of the raw
        # line is the CompanyID - verify against the export's column layout.
        $CompanyID = $BusinessRecord.Split(',')[1]
        $RecordHash[$CompanyID] = [PSCustomObject]@{
            CompanyID = $CompanyID
            CompanyName = $null
            Date = "No date on file"
            Street1 = $BusinessRecord[1].trim('"')
            Street2 = $BusinessRecord[2].trim('"')
            Street3 = $BusinessRecord[3].trim('"')
            City = $BusinessRecord[4].trim('"')
            State = $BusinessRecord[5].trim('"')
            ZIP = $BusinessRecord[6].trim('"')
            }
        } 
    }

# Filter: for each raw CorporationName record, look up the company by its
# foreign key and fill in the CompanyName of the stored object.
Filter AddCompanyName{
    # Split once instead of re-splitting the same line three times.
    $Fields = $_.Split(',')
    $Key = $Fields[1]
    if($RecordHash.ContainsKey($Key)){
        $RecordHash[$Key].CompanyName = ($Fields[2]).trim('"')
        }
    }
Get-Content ".\Data\CorporationName.txt" -ReadCount 1000 | ForEach-Object {
     $_ | AddCompanyName
     }

# Filter: for each raw Filing record, normalize the filing date to
# yyyy-MM-dd and store it on the matching company object.
Filter AddDate{
    # Split once instead of re-splitting the same line three times.
    $Fields = $_.Split(',')
    $Key = $Fields[1]
    if($RecordHash.ContainsKey($Key)){
        $RecordHash[$Key].Date = Get-Date ([DateTime](($Fields[5]).trim('"'))) -Format "yyyy-MM-dd"
        }
    }
Get-Content ".\Data\Filing.txt" -ReadCount 1000 | ForEach-Object {
     $_ | AddDate
     }
     
<#
    This will return an array of objects in $RecordHash, indexed by the companyID - which
    was the foreign key in the text files.  In our situation, we just pushed it out as an
    HTML file for the client to view.
#>
Read more ...

A Quick Ping Solution

Sunday, July 13, 2014
<#

Purpose:    Quickly return Ping results for an array of computers.
Note:        I can't take all of the credit for this one - not much
            at all really.  A colleague of mine wrote the base that I've
            used in this script.  I just modified it for my purposes.
            I'm 110% sure that my co-worker could have tailored the original
            script to my needs in a more efficient manner than I have
            here; but again - I wanted the challenge.  The script will
            accept parameters through the pipeline, or as standard. 
            ie:
                [String]$Machines | .\FastPing.ps1
                .\FastPinger.ps1 $Machines
            As usual, any feedback is greatly appreciated!
Notes:        As you can see in the first usage example that I provided,
            when piping an array to the script, you have to first cast
            it as a string.  I'm not entirely sure why that is.  Ideas? 

#>

# Accepts the machine list either via the pipeline or as a positional argument.
Param(
    [Parameter(ValueFromPipeline=$True)][String[]]$InputList
     )

# NOTE(review): without a Process block, pipeline binding only retains the
# last item - which is why the usage notes above cast the array to a single
# [String] first.  The .split() below then re-separates that string into
# individual machine names.
$InputList = $InputList.split()

# Worker executed in each runspace: ping one machine and classify the result.
$ScriptBlock = {
    Param(
        $Machine
           )

    # Ping timeout in milliseconds.
    [int]$Timeout = 200
    $Options = New-Object System.Net.NetworkInformation.PingOptions
    $Options.TTL = 128
    $Options.DontFragment = $false
    # Standard 32-byte payload.
    $Buffer = ([System.Text.Encoding]::ASCII).GetBytes('a'*32)
    $Ping = New-Object System.Net.NetworkInformation.Ping
    $Reply = $Ping.Send($Machine,$Timeout,$Buffer,$Options)

    # FIX: the original resolved an undefined $Hostname variable here, and its
    # 'DNS Issue' status was unconditionally overwritten by the if/else below.
    # Track resolution separately so a host with a broken DNS record can be
    # distinguished from one that is merely offline.
    $DnsOk = $true
    Try{
        [Void][System.Net.Dns]::GetHostEntry($Machine)
        }
    Catch{
        $DnsOk = $false
        }

    if($Reply.Status -eq 'Success'){
        $Ping_Result = 'Online'
        }
    elseif(-not $DnsOk){
        $Ping_Result = 'DNS Issue'
        }
    else{
        $Ping_Result = 'Offline'
        }

    # One result object per machine.
    [PSCustomObject]@{
        Machine = $Machine
        Ping    = $Ping_Result
        }
    }

#Create a RunspacePool with a maximum of 100 concurrent runspaces
$RunspacePool = [RunspaceFactory]::CreateRunspacePool(1,100)
$RunspacePool.Open()

#Create a job for each Machine in the InputList, and send it to the RunspacePool
$Jobs = ForEach($Machine in $InputList){
    $Job = [PowerShell]::Create().AddScript($ScriptBlock).AddArgument($Machine)
    $Job.RunspacePool = $RunspacePool
    #Pair each pipeline with its async handle so it can be harvested later
    [PSCustomObject]@{
        Pipe = $Job
        Result = $Job.BeginInvoke()
        }
    }

#Harvest results, releasing each pipeline as it completes
$Results = $(ForEach ($Job in $Jobs){
    $Job.Pipe.EndInvoke($Job.Result)
    $Job.Pipe.Dispose()
    })

#FIX: the original returned BEFORE Close/Dispose, so the pool cleanup lines
#were unreachable and the runspace pool leaked.  Clean up first, then return.
$RunspacePool.Close()
$RunspacePool.Dispose()

Return $Results
Read more ...

Renditional Folder Relocator

Sunday, July 13, 2014
<#

Purpose:    Relocate newly created directories from one directory to
            another, creating renditions if necessary.
Notes:        I wrote this for a friend at work that was wanting a 
            script to monitor a folder (in this case via a scheduled
            task), and would move any directories that had not been 
             modified for greater than a day.  There was a bit of 
            fear that the end-users would create directories that shared 
            names, so he wanted to be sure that the files were not 
             over-written.  I decided to mitigate this risk by appending
            a revision number to the end of the filename.  A log was also
            required.  Any feedback is greatly appreciated!
Issue:        No known issues.  Use at your own peril!

#>

#Name the Directories (log destination, watched source, and move target)
$LogFolder = "C:\Scripts\LogFolder\"
$SourceFolder = "C:\Scripts\MoveFrom\"
$EncryptedFolder = "C:\Scripts\MoveTo\"

#Query the source folder for directories that were modified greater than one day ago.
$NewAudits = Get-ChildItem $SourceFolder -Directory | Where { $_.LastWriteTime -lt ((Get-Date).AddDays(-1)) }

#Query the destination folder up front; used later to compute revision numbers.
$EncryptedAudits = Get-ChildItem $EncryptedFolder -Directory

#Instantiate a new array for storage.
$FolderStats = @()

if($NewAudits){
    #Iterate through the NewAudits directory, producing one log object each
    $FolderStats = ForEach($Directory in $NewAudits){
        #Create a new object per Directory
        $FolderObject = New-Object -TypeName PSCustomObject -Property @{
            Name = $Directory.Name
            OldPath = $Directory.FullName
            NewPath = $null
            }
        #Attempt to move the directory to the new location
        Try{
            Move-Item -Path $Directory.FullName -Destination $EncryptedFolder -ErrorAction Stop
            #Update the FolderObject with new information
            $FolderObject.NewPath = (-join($EncryptedFolder,$Directory.Name))
            }
        #Unable to move the folder, most likely because the name already exists.
        Catch{
            #Count existing copies/revisions sharing this name.  FIX: the name
            #is regex-escaped so directory names containing metacharacters
            #(e.g. '.', '(') cannot skew the -match; the count is an [int],
            #not a [Double].
            [int]$RevisionNumber = ($EncryptedAudits | Where { $_.Name -eq $Directory.Name -or $_.Name -match (-join([Regex]::Escape($Directory.Name),"-Rev")) }).Count
            #Increment the count returned by one, to give us the new Revision number.
            $RevisionNumber++
            $NewName = (-join($Directory.Name,"-Rev",$RevisionNumber))
            #Move the Directory, appending the revision number en route.
            Move-Item $Directory.FullName -Destination (-join($EncryptedFolder,$NewName))
            $FolderObject.NewPath = (-join($EncryptedFolder,$NewName))
            }
        Finally{
            #Return the Object regardless of which branch ran
            $FolderObject
            }
        }
    #Create HTML Document for logging.
    $FolderStats | ConvertTo-Html | Out-File -FilePath ("$LogFolder\Log.html")
    }
else{
    #No new Audits were found at the time the script was run.
    Write-Output ("There are no directories that have LastWriteTime values before "+(Get-Date -Format "yyyy-MM-dd")+".")
    }
Read more ...

An Intelligent File Backup

Sunday, July 13, 2014
<#
Purpose:    To create an intelligent backup system for large directories.
Notes:        I wrote this to perform a renditional backup of a large and complex directory.
            I've found that (for whatever reason) files keep getting removed from the
            directory, so I've added a feature to keep track of deleted items as well.
            I know that there are applications to do this, but I wanted to see if I
            could do it myself!  I'm pleased with the overall outcome, but would appreciate
            any feedback!
Issues:        No known issues.  Use at your own peril!
#>

#Set Directory Paths (source to protect, backup destination)
$SourceDirectory = "C:\SourceDirectory\"
$BackupLocation = "C:\BackupDirectory\"

#Get all files contained in Source Folder for processing
$SourceFiles = Get-ChildItem -Path $SourceDirectory -File -Recurse

#Initialize the output collection.  FIX: the original allocated an ArrayList
#here that was immediately discarded when the ForEach below reassigns
#$MoveStats; a plain empty array states the intent without the dead allocation.
$MoveStats = @()

#Iterate through all of the files in SourceDirectory
#Iterate through all of the files in SourceDirectory, copying new files and
#keeping a timestamped rendition of any file that is about to be replaced.
$MoveStats = ForEach($File in $SourceFiles){
    $OldPath = ($File.FullName)
    #Mirror the source path into the backup tree.
    $NewPath = $OldPath.Replace($SourceDirectory,$BackupLocation)
    #Create new object to store output
    $FileObject = New-Object -TypeName PSCustomObject -Property @{
        File = $File.Name
        OriginalPath = $File.FullName
        BackupPath = $null
        Action = $null
        RenditionPath = "N/A"
        }
    #Check for existing file in Backup Location
    if(Test-Path $NewPath){
        
        #File exists.  Check LastWriteTime to see if it needs updating
        if(($File.LastWriteTime) -gt ((Get-Item $NewPath).LastWriteTime)){
            
            #Rename existing backup, appending date/time and ".BACKUP" to the
            #extension, so the prior version survives as a rendition.
            $NewName = (-join(($NewPath),"-",(Get-Date -Format "yyyyMMdd.HHmmss"),".BACKUP"))
            Get-Item -Path $NewPath | Rename-Item -NewName $NewName
            
            #Copy item to backup location
            Copy-Item -Path $File.FullName -Destination $NewPath
            
            #Update FileObject properties
            $FileObject.BackupPath = $NewPath
            $FileObject.Action = "Rendition Saved."
            $FileObject.RenditionPath = $NewName
            }
            
        else{
            #Backup already current - no action taken
            $FileObject.BackupPath = $NewPath
            $FileObject.Action = "No action taken."
            }
        }
    else{
        #File does not exist.  New-Item -Force creates any missing parent
        #directories; then copy the file.
        New-Item -ItemType File -Path $NewPath -Force | Out-Null
        Copy-Item -Path $OldPath -Destination $NewPath
        $FileObject.BackupPath = $NewPath
        $FileObject.Action = "New File Copied."
        }
    
    #Returning Object
    $FileObject
    }
    
#Gather BackupFile inventory in order to mark deletions, ignoring rendition
#(.BACKUP) and deletion (.DELETED) markers.  FIX: the original patterns
#'.BACKUP'/'.DELETED' were unanchored regexes with an unescaped dot, so any
#file whose name merely contained e.g. 'xBACKUP' was silently skipped.
$BackupDirectory = Get-ChildItem $BackupLocation -File -Recurse 
$BackupFiles = $BackupDirectory | Where {$_.name -notmatch '\.BACKUP$' -and $_.name -notmatch '\.DELETED$'}
ForEach($BackupFile in $BackupFiles){
    #Build FileName for checking against SourceFiles
    $BackupFileName = ($BackupFile.FullName).replace($BackupLocation,$SourceDirectory)
    #Check for missing BackupFile in SourceFile Directory
    if($SourceFiles.FullName -notcontains $BackupFileName){
        #File does not exist.  Rename it, appending Date/Time and ".DELETED"
        $DeletedName = (-join($BackupFile.FullName,"-",(Get-Date -Format "yyyyMMdd.HHmmss"),".DELETED"))
        Rename-Item -Path ($BackupFile.FullName) -NewName $DeletedName
        #Add New Object to MoveStats
        $MoveStats += New-Object -TypeName PSCustomObject -Property @{
            File = $BackupFile.Name
            OriginalPath = "N/A"
            BackupPath = $BackupFile.FullName
            Action = "Deleted"
            RenditionPath = "N/A"
            }
        }
    }
    
#Display Output (Out-GridView requires an interactive session)
$MoveStats | Select File,OriginalPath,BackupPath,Action,RenditionPath | Out-GridView
Read more ...

A Semi-Intelligent File Backup

Saturday, July 12, 2014
PowerGUI Script Editor

Purpose:  Create an efficient backup process for large directories.
Notes:    First, the script enumerates all of the files in a specific directory and stores that data in a variable (SourceFiles).  It then checks for a matching file in the backup directory.  If it does not find a matching file, it simply copies the file and the directory structure.  If it does find a matching file, it looks at the LastWriteTime property, and only replaces the backup file if the other file is newer.  I do plan to make this a renditional backup at some point when I am more awake.

#Set Directory Paths (source to protect, backup destination)
$SourceDirectory = "C:\Scripts"
$BackupLocation = "C:\Backup"

#Get all files contained in Source Folder for processing
$SourceFiles = Get-ChildItem -Path $SourceDirectory -File -Recurse

#Initialize the output collection.  FIX: the original allocated an ArrayList
#here that was immediately discarded when the ForEach below reassigns
#$MoveStats; a plain empty array states the intent without the dead allocation.
$MoveStats = @()

#Action
#Action: compare each source file against the backup tree; copy new files and
#overwrite stale backups, recording what was done per file.
$MoveStats = ForEach($File in $SourceFiles){
    $OldPath = ($File.FullName)
    #Mirror the source path into the backup tree.
    $NewPath = $OldPath.Replace($SourceDirectory,$BackupLocation)
    #Create new object to store output
    $FileObject = New-Object -TypeName PSCustomObject -Property @{
        File = $File.Name
        OriginalPath = $File.FullName
        BackupPath = $null
        Action = $null
        }
    #Check for existing file in Backup Location
    if(Test-Path $NewPath){
        #File exists.  Check to see if it needs updating
        if(($File.LastWriteTime) -gt ((Get-Item $NewPath).LastWriteTime)){    
            #Overwrite the file (Will change to renditional in the future.)
            #FIX: the original assigned the undefined variable $NewFile here,
            #leaving BackupPath empty for every updated file.
            $FileObject.BackupPath = $NewPath
            Copy-Item -Path $OldPath -Destination $NewPath -Force
            $FileObject.Action = "File Updated."
            }
        else{
            #Backup already current - no action taken
            $FileObject.BackupPath = $NewPath
            $FileObject.Action = "No action taken."
            }
        }
    else{
        #File does not exist.  New-Item -Force creates any missing parent
        #directories; then copy the file.
        New-Item -ItemType File -Path $NewPath -Force | Out-Null
        Copy-Item -Path $OldPath -Destination $NewPath
        $FileObject.BackupPath = $NewPath
        $FileObject.Action = "New File Copied."
        }
    #Returning Object
    $FileObject
    }
    
#Display Output (file name and action taken, written to the console)
$MoveStats | Select File, Action
Read more ...