# Get a list of all links from a page.
# Arguments: $1 - URL to fetch
# Outputs:   unique href attribute values, one per line, version-sorted
# Returns:   non-zero if curl fails (bad URL, HTTP error) or no hrefs match
crawl() {
  local url=${1:?usage: crawl <url>}
  # -f: fail on HTTP errors; -sS: silent but still report errors; -L: follow redirects.
  # '--' stops option parsing so a URL starting with '-' is not taken as a flag.
  curl -fsSL -- "$url" \
    | grep -oiE 'href[[:space:]]*=[[:space:]]*["'\''][^"'\'']*["'\'']' \
    | sed -E 's/^[^=]*=[[:space:]]*.(.*).$/\1/' \
    | sort -uV   # NOTE: -V (version sort) is a GNU extension
}
# Crawl one level deeper: feed each discovered link back into crawl.
# xargs cannot invoke a shell function (it execs real programs), so we
# iterate with a read loop instead. Nest another loop inside to go deeper.
crawl "${1:?usage: $0 <url>}" | while IFS= read -r link; do
  crawl "$link"
done