A little different concept ...
#!/bin/bash
if [ $# -ne 1 ]
then
    echo "Need a path to search!"
    exit 1
fi
cd "$1" || exit 1
# Album directories are expected two levels below the root (Artist/Album)
find . -maxdepth 2 -mindepth 2 -type d | while read -r dir
do
    albumpath="$dir"
    # URL-encode any problematic characters
    encoded="$(perl -MURI::Escape -e 'print uri_escape($ARGV[0]);' "$albumpath")"
    # Skip if a cover.jpg already exists in the directory
    if [ -f "$albumpath/cover.jpg" ]
    then
        echo "$albumpath/cover.jpg already exists"
        continue
    fi
    # Tell the user what is going on
    echo ""
    echo "Searching for: [$albumpath]"
    # Scrape AlbumArt.org
    url="http://www.albumart.org/index.php?searchkey=$encoded&itempage=1&newsearch=1&searchindex=Music"
    echo "Searching ... [$url]"
    # Grab the first Amazon image without an underscore (usually the largest version)
    coverurl="$(wget -qO - "$url" | grep -m 1 -o 'http://ecx.images-amazon.com/images/I/[%0-9a-zA-Z.,-]*\.jpg')"
    if [ -z "$coverurl" ]
    then
        # No hit for the album name; retry with the parent (artist) directory
        albumpath="$(dirname "$albumpath")"
        echo "Retrying with '$albumpath'"
        encoded="$(perl -MURI::Escape -e 'print uri_escape($ARGV[0]);' "$albumpath")"
        url="http://www.albumart.org/index.php?searchkey=$encoded&itempage=1&newsearch=1&searchindex=Music"
        coverurl="$(wget -qO - "$url" | grep -m 1 -o 'http://ecx.images-amazon.com/images/I/[%0-9a-zA-Z.,-]*\.jpg')"
    fi
    if [ -n "$coverurl" ]
    then
        echo "Cover URL: [$coverurl]"
        # Save the image
        wget "$coverurl" -O "$dir/cover.jpg" 2> /dev/null
    fi
done
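
To try it out, the script could be saved and made executable, then pointed at the root of a music library that follows the Artist/Album layout the find call expects. The file name and path below are only placeholders:

chmod +x fetch-covers.sh
./fetch-covers.sh /path/to/Music   # placeholder path; use your own library root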