add centos stream and refactor script

This commit is contained in:
Lukas Greve
2025-10-25 13:48:53 +02:00
parent 4272105dd2
commit ab633d601d

View File

@@ -1,215 +1,78 @@
#!/bin/bash
# Download cloud OS images for libvirt.
#
# Image URLs, one per distribution. Filenames are derived with basename,
# so each URL must end in the image's real filename.
# readonly: these are constants; accidental reassignment should fail loudly.
readonly IMAGES=(
  "https://cloud.debian.org/images/cloud/trixie/latest/debian-13-genericcloud-amd64.raw"
  "https://download.fedoraproject.org/pub/fedora/linux/releases/42/Cloud/x86_64/images/Fedora-Cloud-Base-Generic-42-1.1.x86_64.qcow2"
  "https://download.opensuse.org/tumbleweed/appliances/openSUSE-Tumbleweed-Minimal-VM.x86_64-Cloud.qcow2"
  "https://dl.rockylinux.org/pub/rocky/10/images/x86_64/Rocky-10-GenericCloud-Base.latest.x86_64.qcow2"
  "https://cloud-images.ubuntu.com/noble/current/noble-server-cloudimg-amd64.img"
  "https://cloud.centos.org/centos/10-stream/x86_64/images/CentOS-Stream-GenericCloud-x86_64-10-latest.x86_64.qcow2"
)
# Target directory where images are stored (libvirt's default image pool).
# readonly: constant; main() only reads it.
readonly TARGET_DIR="/var/lib/libvirt/images"
#######################################
# Download every image listed in IMAGES into TARGET_DIR, skipping files
# that already exist (counted as successes). If TARGET_DIR is not
# writable, re-executes the whole script under sudo.
# Globals:   IMAGES (read), TARGET_DIR (read)
# Arguments: the script's original arguments (forwarded to the sudo re-exec)
# Outputs:   progress to stdout; error diagnostics to stderr
# Returns:   0 when all images are present/downloaded; exits 1 otherwise
#######################################
main() {
  # Escalate privileges only when needed: re-exec under sudo replaces this
  # process, so nothing below runs twice.
  if [[ ! -w "$TARGET_DIR" ]]; then
    if [[ $EUID -ne 0 ]]; then
      echo "This script requires write access to $TARGET_DIR"
      echo "Re-executing with sudo..."
      exec sudo "$0" "$@"
    else
      echo "Error: Cannot write to $TARGET_DIR even with sudo privileges." >&2
      exit 1
    fi
  fi

  echo "Starting download of all images..."
  echo ""

  local success_count=0
  local failure_count=0
  # Declare loop-scoped names once, not on every iteration.
  local url filename filepath

  for url in "${IMAGES[@]}"; do
    filename=$(basename "$url")
    filepath="$TARGET_DIR/$filename"

    # An existing file counts as a success; wget -c below would otherwise
    # try to resume it anyway.
    if [[ -f "$filepath" ]]; then
      echo "Image $filename already exists, skipping..."
      success_count=$((success_count + 1))
      continue
    fi

    echo "Downloading $filename..."
    # -c resumes partial downloads; forced single-line progress bar keeps
    # logs readable when output is not a TTY.
    if wget -P "$TARGET_DIR" --progress=bar:force:noscroll -c "$url"; then
      echo "Download completed: $filename"
      success_count=$((success_count + 1))
    else
      echo "Failed to download $filename" >&2
      failure_count=$((failure_count + 1))
    fi
  done

  echo ""
  echo "Download summary:"
  echo "Successful downloads: $success_count"
  echo "Failed downloads: $failure_count"

  if (( failure_count > 0 )); then
    echo "Some downloads failed. Check above messages for details." >&2
    exit 1
  fi
  echo "All images downloaded successfully!"
}
# Invoke main only when this file is executed directly, not when sourced
# (e.g. by a test harness that wants the functions without side effects).
if [[ "$0" == "${BASH_SOURCE[0]}" ]]; then
  main "$@"
fi