fix: wget recursive workaround
The recursive feature of Wget is being blocked in OpenCI because Wget
honors the Robots Exclusion Standard (robots.txt) by default. This patch
adds the command line argument that tells wget to ignore the robots.txt
file, allowing fetch_directory to work as intended.
Signed-off-by: Lauren Wehrmeister <lauren.wehrmeister@arm.com>
Change-Id: I1e6449fffffec7cda1475b528e9a9f029743d299
diff --git a/utils.sh b/utils.sh
index 17eee28..1c5c442 100644
--- a/utils.sh
+++ b/utils.sh
@@ -108,7 +108,7 @@
local cut_dirs="$(echo "$modified_url" | awk -F/ '{print NF - 5}')"
sa="${saveas:-$base}"
echo "Fetch: $modified_url -> $sa"
- wget -rq -nH --cut-dirs="$cut_dirs" --no-parent \
+ wget -rq -nH --cut-dirs="$cut_dirs" --no-parent -e robots=off \
--reject="index.html*" "$modified_url"
if [ "$sa" != "$base" ]; then
mv "$base" "$sa"