A simple Bash script. Handy for offloading images (and whole folder trees) to DigitalOcean Spaces.
Bash:
#!/bin/bash
# Set your DigitalOcean Spaces credentials
export AWS_ACCESS_KEY_ID="Dxxxxxxxxxxxxx"
export AWS_SECRET_ACCESS_KEY="axxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
export AWS_DEFAULT_REGION="nyc3"
export S3_ENDPOINT="https://nyc3.digitaloceanspaces.com" # Spaces endpoint for the region above
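# Note: s3cmd itself ignores S3_ENDPOINT; it takes the endpoint from ~/.s3cfg
# (or from --host/--host-bucket on each call). For Spaces, the relevant ~/.s3cfg
# entries would look roughly like this (assumed, adjust to your region):
#   host_base = nyc3.digitaloceanspaces.com
#   host_bucket = %(bucket)s.nyc3.digitaloceanspaces.com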
# Set the root folder containing the images and subfolders to upload
root_folder="path_to_root_folder"
bucket_name=$(basename "$root_folder") # Use the folder name as the bucket name
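# Note: Spaces bucket names must be DNS-compliant (lowercase letters, digits,
# hyphens), so make sure the folder name qualifies before running this.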
# Check if the bucket exists
if ! s3cmd ls s3:// | grep -q "s3://$bucket_name$"; then
    echo "Bucket $bucket_name does not exist. Creating bucket..."
    s3cmd mb --acl-public "s3://$bucket_name" # s3cmd's flag is --acl-public, not --access-public
fi
# Function to upload files to DigitalOcean Spaces
upload_files() {
    local folder="$1"
    local base_path="${folder#"$root_folder"}" # relative path from root_folder
    for file in "$folder"/*; do
        if [ -d "$file" ]; then
            upload_files "$file" # Recurse into subfolders
        elif [ -f "$file" ]; then # also skips the literal "*" glob in empty folders
            local s3_path="s3://$bucket_name$base_path/$(basename "$file")"
            echo "Uploading $file to $s3_path..."
            s3cmd put --acl-public "$file" "$s3_path" # --acl-public makes the object web-readable
        fi
    done
}
# Call the function to upload files from the root folder and its subfolders
upload_files "$root_folder"
# List the uploaded objects
echo "Uploaded objects (s3:// URIs):"
s3cmd ls --recursive "s3://$bucket_name/" | awk '{print $4}'
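Note that s3cmd ls prints s3:// URIs rather than web links. If the objects are public (uploaded with --acl-public as above), each one should also be reachable at https://<bucket>.<region>.digitaloceanspaces.com/<key>. A minimal sketch that rewrites the URIs using the S3_ENDPOINT exported earlier; endpoint_host is just a helper variable introduced here:
Bash:
# Rewrite s3:// URIs as public HTTPS links (assumes objects were uploaded with --acl-public)
endpoint_host="${S3_ENDPOINT#https://}" # e.g. nyc3.digitaloceanspaces.com
s3cmd ls --recursive "s3://$bucket_name/" | awk '{print $4}' \
    | sed "s|^s3://$bucket_name/|https://$bucket_name.$endpoint_host/|"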