Add CentOS Stream and refactor script

This commit is contained in:
Lukas Greve
2025-10-25 13:48:53 +02:00
parent 4272105dd2
commit ab633d601d

View File

@@ -1,215 +1,78 @@
#!/bin/bash
# Script to detect locally available OS images and update image_location URLs in main.tf files
# This script updates terraform configurations to use local image paths instead of remote URLs
# It also supports reverting back to original remote URLs using hardcoded values
# Print usage information and exit with a non-zero status.
usage() {
    # Single heredoc instead of one echo per line; $0 expands as before.
    cat <<EOF
Usage: $0 [options]
 options:
 -h, --help Display this help message
 -d, --dry-run Show what would be changed without making modifications
 -r, --revert Revert image_location URLs back to original remote URLs

Example:
 $0 # Convert remote URLs to local paths (default)
 $0 -d # Dry run - show what would be updated
 $0 -r # Revert to original remote URLs
 $0 -r -d # Dry run revert mode
EOF
    exit 1
}
# Parse command line arguments.
# -d/--dry-run  -> DRY_RUN=true
# -r/--revert   -> REVERT_MODE=true
# -h/--help or anything unknown falls through to usage().
DRY_RUN=false
REVERT_MODE=false
while (( $# > 0 )); do
    case "$1" in
        -h|--help)
            usage
            ;;
        -d|--dry-run)
            DRY_RUN=true
            shift
            ;;
        -r|--revert)
            REVERT_MODE=true
            shift
            ;;
        *)
            echo "Unknown option: $1"
            usage
            ;;
    esac
done
# Directory where locally downloaded OS images live.
IMAGE_DIR="/var/lib/libvirt/images"
# Abort early if the image directory is absent; nothing below can work without it.
if ! [[ -d "$IMAGE_DIR" ]]; then
    echo "Error: Directory $IMAGE_DIR does not exist"
    exit 1
fi
# Image URLs (hardcoded original URLs from download_images.sh).
# ORIGINAL_IMAGES is the lookup table used by revert mode to map a local
# filename back to its upstream URL.
# BUGFIX: the previous text opened two arrays ("ORIGINAL_IMAGES=(" and
# "IMAGES=(") but closed only one — a syntax error left over from a merge.
# Both arrays are declared here, and IMAGES is derived from ORIGINAL_IMAGES
# so the two lists can never drift apart.
ORIGINAL_IMAGES=(
    "https://cloud.debian.org/images/cloud/trixie/latest/debian-13-genericcloud-amd64.raw"
    "https://download.fedoraproject.org/pub/fedora/linux/releases/42/Cloud/x86_64/images/Fedora-Cloud-Base-Generic-42-1.1.x86_64.qcow2"
    "https://download.opensuse.org/tumbleweed/appliances/openSUSE-Tumbleweed-Minimal-VM.x86_64-Cloud.qcow2"
    "https://dl.rockylinux.org/pub/rocky/10/images/x86_64/Rocky-10-GenericCloud-Base.latest.x86_64.qcow2"
    "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.img"
    "https://cloud.centos.org/centos/10-stream/x86_64/images/CentOS-Stream-GenericCloud-x86_64-10-latest.x86_64.qcow2"
)
# Image URLs to download (same set as the originals).
IMAGES=("${ORIGINAL_IMAGES[@]}")
# Function to get filename from URL.
# Arguments: $1 - a URL
# Outputs:   the final path component (the filename) on stdout
# BUGFIX: the function body was never closed (missing "}") — a merge artifact
# that made the rest of the script part of the function.
get_filename_from_url() {
    local url=$1
    basename "$url"
}
# Target directory for downloaded images.
TARGET_DIR="/var/lib/libvirt/images"
# Main script execution: download every image in IMAGES into TARGET_DIR,
# re-executing under sudo when the directory is not writable.
main() {
    # Escalate via sudo if we lack write access; give up if already root.
    if [[ ! -w "$TARGET_DIR" ]]; then
        if [[ $EUID -ne 0 ]]; then
            echo "This script requires write access to $TARGET_DIR"
            echo "Re-executing with sudo..."
            exec sudo "$0" "$@"
        fi
        echo "Error: Cannot write to $TARGET_DIR even with sudo privileges."
        exit 1
    fi
    echo "Starting download of all images..."
    echo ""
    local ok=0
    local failed=0
    local url img_name img_path
    for url in "${IMAGES[@]}"; do
        img_name=$(basename "$url")
        img_path="$TARGET_DIR/$img_name"
        # Already present on disk: count as success and move on.
        if [[ -f "$img_path" ]]; then
            echo "Image $img_name already exists, skipping..."
            ((ok++))
            continue
        fi
        echo "Downloading $img_name..."
        # wget: forced progress bar, -c resumes partial downloads.
        if wget -P "$TARGET_DIR" --progress=bar:force:noscroll -c "$url"; then
            echo "Download completed: $img_name"
            ((ok++))
        else
            echo "Failed to download $img_name"
            ((failed++))
        fi
    done
    # Summary
    echo ""
    echo "Download summary:"
    echo "Successful downloads: $ok"
    echo "Failed downloads: $failed"
    if (( failed > 0 )); then
        echo "Some downloads failed. Check above messages for details."
        exit 1
    fi
    echo "All images downloaded successfully!"
}
# Collect every main.tf under the current directory; bail out if none exist.
MAIN_TF_FILES=$(find . -name "main.tf" -type f)
if [[ -z "$MAIN_TF_FILES" ]]; then
    echo "No main.tf files found!"
    exit 1
fi
echo "Found main.tf files:"
echo "$MAIN_TF_FILES"
echo ""
# Process each main.tf: in revert mode, map file:// paths back to the
# hardcoded original URLs; otherwise, point remote https:// URLs at locally
# available images in $IMAGE_DIR. Honors DRY_RUN in both modes.
for file in $MAIN_TF_FILES; do
    echo "Processing $file..."
    # Skip files that have no image_location lines at all.
    if ! grep -q "image_location" "$file"; then
        echo " No image_location found in $file, skipping..."
        continue
    fi
    if [ "$REVERT_MODE" = true ]; then
        # Revert operation: change file:// back to original https:// URLs.
        # Write to a temp file to avoid reading and writing $file at once.
        temp_file=$(mktemp)
        while IFS= read -r line; do
            # BUGFIX: POSIX ERE has no lazy quantifier, so the original
            # pattern "(.*?)" was invalid; [^"]* captures up to the closing
            # quote instead.
            if [[ "$line" =~ image_location.*=.*\"file://([^\"]*)\" ]]; then
                local_file_path="${BASH_REMATCH[1]}"
                local_filename=$(basename "$local_file_path")
                # Look the filename up in the hardcoded original URL list.
                found_match=false
                for original_url in "${ORIGINAL_IMAGES[@]}"; do
                    if [[ "$(basename "$original_url")" == "$local_filename" ]]; then
                        echo " Found matching file: $local_filename"
                        if [ "$DRY_RUN" = false ]; then
                            # Rebuild the line, preserving its indentation,
                            # instead of piping through sed (URL characters
                            # could collide with sed's delimiter/metachars).
                            prefix="${line%%image_location*}"
                            printf '%s\n' "${prefix}image_location = \"$original_url\"" >> "$temp_file"
                            echo " Reverted to original URL: $original_url"
                        else
                            echo " Would revert to: $original_url"
                            echo "$line" >> "$temp_file"
                        fi
                        found_match=true
                        break
                    fi
                done
                # No match: keep the line unchanged but warn.
                if [ "$found_match" = false ]; then
                    echo " Warning: No matching original URL found for $local_filename"
                    echo "$line" >> "$temp_file"
                fi
            else
                # Not an image_location line, copy as is.
                echo "$line" >> "$temp_file"
            fi
        done < "$file"
        if [ "$DRY_RUN" = false ]; then
            mv "$temp_file" "$file"
            echo " Updated $file"
        else
            rm "$temp_file"
            echo " Would update $file (dry run)"
        fi
    else
        # Normal operation: convert remote URLs to local file:// paths.
        temp_file=$(mktemp)
        while IFS= read -r line; do
            if [[ "$line" =~ image_location.*=.*\"(https://[^\"]*)\" ]]; then
                remote_url="${BASH_REMATCH[1]}"
                filename=$(basename "$remote_url")
                local_path="$IMAGE_DIR/$filename"
                # Only rewrite when the image actually exists locally.
                if [[ -f "$local_path" ]]; then
                    echo " Found local image: $filename"
                    if [ "$DRY_RUN" = false ]; then
                        prefix="${line%%image_location*}"
                        printf '%s\n' "${prefix}image_location = \"file://$local_path\"" >> "$temp_file"
                        echo " Updated to: file://$local_path"
                    else
                        echo " Would update to: file://$local_path"
                        echo "$line" >> "$temp_file"
                    fi
                else
                    echo " Local image not found: $filename"
                    echo "$line" >> "$temp_file"
                fi
            else
                # Not an image_location line, copy as is.
                echo "$line" >> "$temp_file"
            fi
        done < "$file"
        # BUGFIX: the original tested -f "$temp_file" *after* mv had consumed
        # it, so " Updated $file" could never print; report after the mv/rm.
        if [ "$DRY_RUN" = false ]; then
            mv "$temp_file" "$file"
            echo " Updated $file"
        else
            rm "$temp_file"
            echo " Would update $file (dry run)"
        fi
    fi
done
echo ""
# Final status message, phrased per mode.
if [ "$DRY_RUN" = false ]; then
    if [ "$REVERT_MODE" = true ]; then
        echo "Image location URLs have been successfully reverted to original remote URLs!"
    else
        echo "Image location URLs have been successfully updated in all main.tf files!"
    fi
else
    echo "Dry run completed - no changes were made."
fi
# Show a summary of locally available images.
echo ""
echo "Summary of local image availability:"
# BUGFIX: find's -o binds looser than implicit -a, so without \( \) grouping
# the -maxdepth/-type f predicates applied only to the first -name pattern.
find "$IMAGE_DIR" -maxdepth 1 -type f \( -name "*.qcow2" -o -name "*.raw" -o -name "*.img" \) | \
while read -r image; do
    filename=$(basename "$image")
    echo "$filename"
done
# If nothing was found, show what images are expected.
# BUGFIX: this "if" was never closed (merge artifact) — add the missing "fi",
# and apply the same \( \) grouping fix as the summary find above.
if ! find "$IMAGE_DIR" -maxdepth 1 -type f \( -name "*.qcow2" -o -name "*.raw" -o -name "*.img" \) | grep -q .; then
    echo " No local images found in $IMAGE_DIR"
    echo " Run download_images.sh to download required images."
fi
# Run main function if script is executed directly (not sourced).
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    main "$@"
fi