Windows PowerShell Automation
File Organization Script
# Auto-sort downloads by file type.
# Moves files sitting directly in the Downloads root into per-category
# subfolders based on extension; files already in subfolders are untouched.
$source = "$env:USERPROFILE\Downloads"
$destinations = @{
    "Images"      = @(".jpg", ".jpeg", ".png", ".gif", ".bmp", ".webp")
    "Documents"   = @(".pdf", ".doc", ".docx", ".txt", ".rtf", ".xlsx", ".pptx")
    "Archives"    = @(".zip", ".rar", ".7z", ".tar", ".gz")
    "Videos"      = @(".mp4", ".avi", ".mkv", ".mov", ".wmv")
    "Music"       = @(".mp3", ".flac", ".wav", ".aac", ".ogg")
    "Executables" = @(".exe", ".msi", ".bat", ".ps1")
}
foreach ($folder in $destinations.Keys) {
    $path = Join-Path $source $folder
    # Out-Null keeps New-Item's DirectoryInfo object off the pipeline.
    if (!(Test-Path $path)) { New-Item -ItemType Directory -Path $path | Out-Null }
    # Compare the exact Extension property instead of -Filter: the provider
    # filter uses legacy 8.3 short-name matching, so "*.doc" also matches
    # ".docx" and similar. -File excludes directories (a folder named
    # "stuff.zip" must not be moved). -contains is case-insensitive.
    Get-ChildItem -Path $source -File |
        Where-Object { $destinations[$folder] -contains $_.Extension } |
        Move-Item -Destination $path -ErrorAction SilentlyContinue
}
Automated Backup Script
# Automated backup: mirror $source into a dated folder under $backupRoot,
# then prune so only the 7 most recent backup folders remain.
$source = "C:\ImportantData"
$backupRoot = "D:\Backups"
$date = Get-Date -Format "yyyy-MM-dd"
$destination = Join-Path $backupRoot $date

# Create dated folder (-Force: no error if it already exists)
New-Item -ItemType Directory -Path $destination -Force | Out-Null

# Robocopy with logging. /MIR mirrors (also deletes destination extras),
# /R:3 /W:10 = 3 retries 10 s apart, /TEE echoes the log to the console,
# /XD skips the Temp and Cache directories.
$logFile = Join-Path $destination "backup.log"
robocopy $source $destination /MIR /R:3 /W:10 /LOG:$logFile /TEE /XD "Temp" "Cache"

# Robocopy exit codes 0-7 are success variants; 8 and above mean at least
# one copy failed. Skip retention cleanup on failure so a bad run never
# deletes the only good backups.
if ($LASTEXITCODE -ge 8) {
    Write-Error "robocopy failed with exit code $LASTEXITCODE; skipping retention cleanup"
} else {
    # Keep only the last 7 backups. -Directory ensures stray files in the
    # backup root are never swept up by Remove-Item -Recurse -Force.
    Get-ChildItem $backupRoot -Directory |
        Sort-Object CreationTime -Descending |
        Select-Object -Skip 7 |
        Remove-Item -Recurse -Force
}
System Maintenance Automation
# System maintenance: clear temp files, empty the recycle bin, run disk
# cleanup, optimize volumes per media type, and refresh PowerShell help.

# Clean temp files (SilentlyContinue: in-use files are expected and skipped)
Remove-Item -Path "$env:TEMP\*" -Recurse -Force -ErrorAction SilentlyContinue
Remove-Item -Path "C:\Windows\Temp\*" -Recurse -Force -ErrorAction SilentlyContinue

# Empty Recycle Bin
Clear-RecycleBin -Force -ErrorAction SilentlyContinue

# Run disk cleanup with the settings saved under profile 1
# (profile must have been configured beforehand via: cleanmgr /sageset:1)
cleanmgr /sagerun:1

# Optimize per media type: defrag spinning disks, TRIM everything else.
# Volumes without a drive letter (recovery partitions etc.) are skipped -
# they would make Get-Partition -DriveLetter throw.
Get-Volume |
    Where-Object { $_.DriveType -eq 'Fixed' -and $_.FileSystem -eq 'NTFS' -and $_.DriveLetter } |
    ForEach-Object {
        $drive = $_.DriveLetter
        # Resolve the physical disk backing this volume, then its media type.
        $diskNumber = (Get-Partition -DriveLetter $drive).DiskNumber
        $mediaType = (Get-PhysicalDisk | Where-Object { $_.DeviceId -eq $diskNumber }).MediaType
        if ($mediaType -eq 'HDD') {
            Optimize-Volume -DriveLetter $drive -Defrag
        } else {
            # SSD (or unknown media): retrim instead of defragmenting
            Optimize-Volume -DriveLetter $drive -Retrim
        }
    }

# Update help
Update-Help -Force
Task Scheduler Integration
Create Automated Task
# Register a daily 02:00 backup task running as SYSTEM with highest
# privileges; wakes the machine and runs only when a network is available.
# -NoProfile and -ExecutionPolicy Bypass keep the task from failing under a
# restrictive machine execution policy or a slow/broken profile script.
$action = New-ScheduledTaskAction -Execute "powershell.exe" -Argument "-NoProfile -ExecutionPolicy Bypass -File C:\Scripts\backup.ps1"
$trigger = New-ScheduledTaskTrigger -Daily -At "02:00"
$settings = New-ScheduledTaskSettingsSet -RunOnlyIfNetworkAvailable -WakeToRun
$principal = New-ScheduledTaskPrincipal -UserId "SYSTEM" -RunLevel Highest
Register-ScheduledTask -TaskName "DailyBackup" -Action $action -Trigger $trigger -Settings $settings -Principal $principal
Common Triggers
-AtLogon                                 # when any user logs on
-AtStartup                               # at system boot
-Daily -At "06:00"                       # every day at 06:00
-Weekly -DaysOfWeek Monday -At "09:00"   # Mondays at 09:00
-Once -At (Get-Date).AddMinutes(5)       # one-shot, five minutes from now
# Note: idle and event conditions are NOT New-ScheduledTaskTrigger parameters.
# Run-on-idle is a task *setting*: New-ScheduledTaskSettingsSet -RunOnlyIfIdle
# Event triggers are built as CIM instances of MSFT_TaskEventTrigger with an
# XPath event subscription (e.g. LogName System, Source "Service Control Manager")
Python Automation
Email Processing
import imaplib
import email
from datetime import datetime, timedelta
def archive_old_emails():
    """Copy INBOX messages older than 30 days to the Archive mailbox,
    then mark the originals deleted and expunge them.

    NOTE(review): credentials are hard-coded below - move them to
    environment variables or a secrets store before deploying.
    """
    # IMAP date literal, e.g. "01-Jan-2025"
    cutoff_date = (datetime.now() - timedelta(days=30)).strftime("%d-%b-%Y")
    imap = imaplib.IMAP4_SSL("imap.gmail.com")
    try:
        imap.login("user@gmail.com", "app_password")
        imap.select("INBOX")
        # SEARCH returns a single space-separated ID string (possibly empty)
        status, messages = imap.search(None, f'BEFORE {cutoff_date}')
        if status != "OK":
            raise RuntimeError(f"IMAP search failed: {status}")
        for msg_id in messages[0].split():
            # Copy first; only mark deleted if the copy succeeded, so a
            # failed copy can never lose mail on expunge.
            status, _ = imap.copy(msg_id, "Archive")
            if status == "OK":
                imap.store(msg_id, "+FLAGS", "\\Deleted")
        imap.expunge()
        imap.close()
    finally:
        # Always log out, even if login/select/search raised.
        imap.logout()

if __name__ == "__main__":
    archive_old_emails()
API Data Sync
import requests
import json
from datetime import datetime
def sync_data():
    """Pull records from the primary API, save the active ones to a dated
    local JSON backup, and push them to the secondary system.

    NOTE(review): the bearer token is hard-coded - load it from the
    environment or a secrets manager before deploying.

    Raises:
        requests.HTTPError: if either API call returns an error status.
    """
    headers = {"Authorization": "Bearer TOKEN"}

    # Fetch data. The timeout stops an unresponsive server from hanging the
    # job forever; raise_for_status surfaces HTTP errors instead of letting
    # an error payload flow into the processing below.
    response = requests.get("https://api.example.com/data", headers=headers, timeout=30)
    response.raise_for_status()
    data = response.json()

    # Keep only active records
    processed = [item for item in data if item["status"] == "active"]

    # Save a dated local backup (one file per calendar day)
    with open(f"backup_{datetime.now().strftime('%Y%m%d')}.json", "w") as f:
        json.dump(processed, f)

    # Upload to secondary system; fail loudly if the sync is rejected
    upload = requests.post("https://api.backup.com/sync", json=processed, headers=headers, timeout=30)
    upload.raise_for_status()

if __name__ == "__main__":
    sync_data()
File Watcher
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
import time
class Handler(FileSystemEventHandler):
    """Watchdog handler that reports each newly created file."""

    def on_created(self, event):
        """Print the path of a freshly created file; directory events are ignored."""
        if not event.is_directory:
            print(f"New file: {event.src_path}")
            # Trigger processing script
# Watch C:\WatchFolder (recursively) and report new files until Ctrl+C.
observer = Observer()
# Raw string for the Windows path: "\W" happens to survive as a literal
# backslash, but unrecognized escapes raise SyntaxWarning in modern Python.
observer.schedule(Handler(), path=r"C:\WatchFolder", recursive=True)
observer.start()
try:
    # The observer runs on its own thread; this loop just keeps the main
    # thread alive so Ctrl+C can be caught.
    while True:
        time.sleep(1)
except KeyboardInterrupt:
    observer.stop()
observer.join()
Bash/Linux Automation
Log Rotation
#!/bin/bash
# rotate_logs.sh - compress week-old app logs into an archive directory
# and prune archives past the retention window.
set -euo pipefail

LOG_DIR="/var/log/myapp"
ARCHIVE_DIR="/var/log/myapp/archive"
DAYS_TO_KEEP=30

# Create archive directory
mkdir -p -- "$ARCHIVE_DIR"

# Compress logs older than 7 days, then move the .gz into the archive.
# -prune keeps find out of the archive directory itself. Chained -execs
# act as AND, so mv only runs when gzip succeeded.
find "$LOG_DIR" -path "$ARCHIVE_DIR" -prune -o -name "*.log" -mtime +7 \
  -exec gzip {} \; -exec mv {}.gz "$ARCHIVE_DIR" \;

# Delete archives older than the retention period
find "$ARCHIVE_DIR" -name "*.gz" -mtime +"$DAYS_TO_KEEP" -delete

# Restart service to create new log file.
# NOTE(review): if myapp supports SIGHUP/reload to reopen logs, prefer
# that over a full restart - confirm with the service's docs.
systemctl restart myapp
Database Backup
#!/bin/bash
# db_backup.sh - dump the production MySQL database, gzip it, ship it to
# S3, and keep only a week of local copies.
# Required env: DB_PASS (password for the backup user).
set -euo pipefail

DB_NAME="production"
DB_USER="backup_user"
BACKUP_DIR="/backups/mysql"
DATE=$(date +%Y%m%d_%H%M%S)
BACKUP_FILE="$BACKUP_DIR/${DB_NAME}_${DATE}.sql.gz"

# Fail fast with a clear message if the password isn't provided.
# (The original referenced $DB_PASS without ever defining it.)
: "${DB_PASS:?DB_PASS must be set in the environment}"

mkdir -p -- "$BACKUP_DIR"

# Create backup. The password goes through the environment (MYSQL_PWD)
# instead of -p$DB_PASS so it never appears in `ps` output. pipefail
# (set above) ensures a failed mysqldump isn't masked by gzip succeeding.
MYSQL_PWD="$DB_PASS" mysqldump -u "$DB_USER" "$DB_NAME" | gzip > "$BACKUP_FILE"

# Upload to S3
aws s3 cp "$BACKUP_FILE" s3://company-backups/mysql/

# Clean local backups older than 7 days
find "$BACKUP_DIR" -name "*.sql.gz" -mtime +7 -delete
Server Health Check
#!/bin/bash
# health_check.sh - email an alert when CPU, memory, or root-disk usage
# crosses its threshold. Intended to run from cron.
set -euo pipefail

THRESHOLD_CPU=80
THRESHOLD_MEM=80
THRESHOLD_DISK=90

# Field 2 of top's "Cpu(s)" line is the user-CPU percentage in batch mode.
# NOTE(review): top's line format varies by version/locale - verify on the
# target distro, or parse /proc/stat or mpstat for a stable source.
CPU_USAGE=$(top -bn1 | grep "Cpu(s)" | awk '{print $2}' | cut -d'%' -f1)
MEM_USAGE=$(free | grep Mem | awk '{print $3/$2 * 100.0}')
DISK_USAGE=$(df / | tail -1 | awk '{print $5}' | cut -d'%' -f1)

ALERT=""
# CPU/memory values are decimals, so compare via bc; (( )) is integer-only.
if (( $(echo "$CPU_USAGE > $THRESHOLD_CPU" | bc -l) )); then
  ALERT+="High CPU: ${CPU_USAGE}%\n"
fi
if (( $(echo "$MEM_USAGE > $THRESHOLD_MEM" | bc -l) )); then
  ALERT+="High Memory: ${MEM_USAGE}%\n"
fi
if (( DISK_USAGE > THRESHOLD_DISK )); then
  ALERT+="High Disk: ${DISK_USAGE}%\n"
fi

# Only send mail when something tripped. printf '%b' expands the stored
# \n escapes portably; echo -e behavior differs across shells.
if [[ -n "$ALERT" ]]; then
  printf '%b' "$ALERT" | mail -s "Server Alert $(hostname)" admin@company.com
fi
Windows Batch Scripts
Daily Cleanup
@echo off
rem Daily cleanup: purge temp files and Chrome's cache, flush DNS, logging
rem everything to a dated file under C:\Logs.
rem NOTE(review): the %date% substring parsing below assumes a
rem MM/DD/YYYY-style short-date locale - verify on non-US systems, or
rem derive the stamp via PowerShell's Get-Date for locale independence.

rem Ensure the log directory exists, otherwise every append below fails.
if not exist C:\Logs mkdir C:\Logs

set LOGFILE=C:\Logs\cleanup_%date:~-4,4%%date:~-10,2%%date:~-7,2%.log
echo Starting cleanup %date% %time% >> %LOGFILE%

:: Delete temp files
del /q/f/s %TEMP%\* >> %LOGFILE% 2>&1
del /q/f/s C:\Windows\Temp\* >> %LOGFILE% 2>&1

:: Clear browser cache (Chrome recreates the folder on next launch)
rmdir /q/s "%LOCALAPPDATA%\Google\Chrome\User Data\Default\Cache" >> %LOGFILE% 2>&1

:: Flush DNS
ipconfig /flushdns >> %LOGFILE% 2>&1

echo Cleanup completed %date% %time% >> %LOGFILE%
Service Monitor
@echo off
rem Restart MyService if it is not running, logging each restart.

rem Ensure the log directory exists before appending to the log file.
if not exist C:\Logs mkdir C:\Logs

rem >nul keeps the query output off the console; FIND sets errorlevel 1
rem when "RUNNING" is absent from sc's status output.
sc query "MyService" | find "RUNNING" >nul
if %errorlevel% neq 0 (
    net start "MyService"
    echo %date% %time% Service restarted >> C:\Logs\service_monitor.log
)
Cron Jobs
Linux Scheduling
# Edit crontab
crontab -e
# Examples
# field order: minute  hour  day-of-month  month  day-of-week  command
0 2 * * * /home/user/scripts/backup.sh # Daily at 2 AM
0 */6 * * * /home/user/scripts/sync.sh # Every 6 hours
0 9 * * 1 /home/user/scripts/report.sh # Mondays at 9 AM
*/5 * * * * /home/user/scripts/monitor.sh # Every 5 minutes
0 0 1 * * /home/user/scripts/monthly_cleanup.sh # First of month
Windows Cron Equivalent
# Daily at 3 AM
$trigger = New-ScheduledTaskTrigger -Daily -At "03:00"
# Every 4 hours (repetition must be attached to a -Once trigger;
# the one-day RepetitionDuration keeps it repeating through each day)
$trigger = New-ScheduledTaskTrigger -Once -At "00:00" -RepetitionInterval (New-TimeSpan -Hours 4) -RepetitionDuration (New-TimeSpan -Days 1)
# On idle - idle is a task *setting*, not a trigger:
# New-ScheduledTaskTrigger has no -IdleDuration parameter
$settings = New-ScheduledTaskSettingsSet -RunOnlyIfIdle -IdleDuration (New-TimeSpan -Minutes 10)
Browser Automation
Selenium Python
# Headless Chrome: log in to example.com and scrape the "data" element.
# NOTE(review): credentials are hard-coded - move them to environment
# variables or a secrets store before deploying.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

options = Options()
options.add_argument("--headless")
options.add_argument("--no-sandbox")

driver = webdriver.Chrome(options=options)
try:
    driver.get("https://example.com/login")
    driver.find_element(By.ID, "username").send_keys("user")
    driver.find_element(By.ID, "password").send_keys("pass")
    driver.find_element(By.ID, "submit").click()

    # Extract data - wait for the post-login page to render the element
    # instead of racing the navigation triggered by the click above.
    element = WebDriverWait(driver, 10).until(
        EC.presence_of_element_located((By.CLASS_NAME, "data"))
    )
    with open("output.txt", "w") as f:
        f.write(element.text)
finally:
    # Always tear the browser down, even when the scrape raises.
    driver.quit()
Cloud Automation
AWS CLI
# Sync S3 bucket (copies new/changed objects only; add --delete to also
# remove destination objects missing from the source)
aws s3 sync s3://source-bucket s3://dest-bucket
# Start EC2 instances
aws ec2 start-instances --instance-ids i-1234567890abcdef0
# Create RDS snapshot (snapshot name carries today's date)
aws rds create-db-snapshot --db-instance-identifier prod-db --db-snapshot-identifier prod-$(date +%Y%m%d)
Azure CLI
# Start VM
az vm start --name MyVM --resource-group MyRG
# Backup SQL database (exports a .bacpac to blob storage)
# NOTE(review): as written this lacks auth/storage-key flags that
# `az sql db export` normally requires - verify against current CLI docs.
az sql db export --server myserver --name mydb --storage-uri https://mystorage.blob.core.windows.net/backups/mydb.bacpac
Git Automation
Auto-commit Script
#!/bin/bash
# auto_commit.sh - stage, commit, and push everything in the repo with a
# timestamped message. Intended for cron-driven snapshot commits.
set -euo pipefail

# Abort if the repo path is wrong; otherwise git would operate on
# whatever directory the script happened to be launched from.
cd /path/to/repo || exit 1

git add .

# Commit only when something is actually staged: `git commit` exits
# non-zero on an empty commit, which would abort the script under set -e.
if ! git diff --cached --quiet; then
    git commit -m "Auto update $(date '+%Y-%m-%d %H:%M:%S')"
fi

# Push unconditionally so a commit left behind by an earlier failed push
# still gets delivered.
git push origin main
Git Hooks
# .git/hooks/pre-commit
#!/bin/bash
# Pre-commit hook: run the test suite and block the commit if it fails.
# `if ! cmd` checks the exit status directly - the `$?` round-trip is
# fragile (any statement in between clobbers it).
if ! npm test; then
    echo "Tests failed. Commit aborted." >&2
    exit 1
fi
Notification Integration
Slack Webhook
import requests
import json
def send_slack(message):
    """Post `message` as text to the Slack incoming webhook.

    NOTE(review): the webhook URL embeds a secret token - load it from
    the environment or a secrets store before deploying.

    Args:
        message: plain-text body for the Slack post.

    Raises:
        requests.HTTPError: if Slack rejects the post.
    """
    webhook_url = "https://hooks.slack.com/services/TOKEN"
    payload = {"text": message}
    # timeout stops a network stall from hanging the calling job;
    # raise_for_status makes delivery failures visible instead of silent.
    response = requests.post(webhook_url, json=payload, timeout=10)
    response.raise_for_status()

send_slack("Backup completed successfully")
Email Alert
# Send a completion alert by email over authenticated SSL SMTP.
# NOTE(review): the SMTP password is hard-coded in plain text below - move
# it to a secret store (e.g. an Export-Clixml credential file or an
# environment variable) before using this outside a lab.
# NOTE(review): Send-MailMessage is marked obsolete by Microsoft; it still
# functions, but consider MailKit or Microsoft Graph for new code.
$smtp = "smtp.gmail.com"
$port = 587
$from = "alert@company.com"
$to = "admin@company.com"
$subject = "Automation Alert"
$body = "Task completed at $(Get-Date)"
# Build a PSCredential from the plain-text password (see security note above).
$credential = New-Object System.Management.Automation.PSCredential($from, (ConvertTo-SecureString "password" -AsPlainText -Force))
Send-MailMessage -SmtpServer $smtp -Port $port -UseSsl -Credential $credential -From $from -To $to -Subject $subject -Body $body