From c8ef0c03b0b7f8238068f76a8c4f5c1c249c02e8 Mon Sep 17 00:00:00 2001
From: Maxime Hadjinlian
Date: Mon, 2 Apr 2018 10:14:23 +0200
Subject: [PATCH] download: put most of the infra in dl-wrapper

The goal here is to simplify the infrastructure by putting most of the
code in the dl-wrapper, as it is easier to implement and to read.

Most of the functions were already common; this patch finalizes the move
by making pkg-download.mk pass all the parameters needed to the
dl-wrapper, which in turn passes everything on to every backend. Each
backend then cherry-picks what it needs from these arguments and acts
accordingly.

This eases the transition to the addition of a sub-directory per package
in the DL_DIR and, later on, a git cache.

[Peter: drop ';' in BR_NO_CHECK_HASH_FOR in DOWNLOAD macro and swap
cd/rm -rf as mentioned by Yann, fix typos]
Signed-off-by: Maxime Hadjinlian
Tested-by: Luca Ceresoli
Reviewed-by: Luca Ceresoli
Reviewed-by: "Yann E. MORIN"
Signed-off-by: Peter Korsgaard
---
 package/pkg-download.mk     | 166 +++++++-----------------------------
 support/download/cvs        |   2 +-
 support/download/dl-wrapper | 147 +++++++++++++++++++------------
 support/download/wget       |  10 ++-
 4 files changed, 131 insertions(+), 194 deletions(-)

diff --git a/package/pkg-download.mk b/package/pkg-download.mk
index a410dce1ee..2847a99d2f 100644
--- a/package/pkg-download.mk
+++ b/package/pkg-download.mk
@@ -42,6 +42,8 @@ DL_DIR := $(shell mkdir -p $(DL_DIR) && cd $(DL_DIR) >/dev/null && pwd)
 #
 # geturischeme: http
 geturischeme = $(firstword $(subst ://, ,$(call qstrip,$(1))))
+# getschemeplusuri: git|parameter+http://example.com
+getschemeplusuri = $(call geturischeme,$(1))$(if $(2),\|$(2))+$(1)
 # stripurischeme: www.example.com/dir/file
 stripurischeme = $(lastword $(subst ://, ,$(call qstrip,$(1))))
 # domain: www.example.com
@@ -61,152 +63,42 @@ github = https://github.com/$(1)/$(2)/archive/$(3)
 export BR_NO_CHECK_HASH_FOR =
 
 ################################################################################
-# The DOWNLOAD_* helpers are in charge of getting a working copy
-# of the source repository for their corresponding SCM,
-# checking out the requested version / commit / tag, and create an
-# archive out of it. DOWNLOAD_SCP uses scp to obtain a remote file with
-# ssh authentication. DOWNLOAD_WGET is the normal wget-based download
-# mechanism.
+# DOWNLOAD -- Download helper.
Will call DL_WRAPPER which will try to download +# source from: +# 1) BR2_PRIMARY_SITE if enabled +# 2) Download site, unless BR2_PRIMARY_SITE_ONLY is set +# 3) BR2_BACKUP_SITE if enabled, unless BR2_PRIMARY_SITE_ONLY is set +# +# Argument 1 is the source location # ################################################################################ -define DOWNLOAD_GIT - $(EXTRA_ENV) $(DL_WRAPPER) -b git \ - -o $(DL_DIR)/$($(PKG)_SOURCE) \ - $(if $($(PKG)_GIT_SUBMODULES),-r) \ - -H $(PKGDIR)/$($(PKG)_RAWNAME).hash \ - $(QUIET) \ - -- \ - -u $($(PKG)_SITE) \ - -c $($(PKG)_DL_VERSION) \ - -n $($(PKG)_BASENAME_RAW) \ - $($(PKG)_DL_OPTS) -endef - -define DOWNLOAD_BZR - $(EXTRA_ENV) $(DL_WRAPPER) -b bzr \ - -o $(DL_DIR)/$($(PKG)_SOURCE) \ - $(QUIET) \ - -- \ - -u $($(PKG)_SITE) \ - -c $($(PKG)_DL_VERSION) \ - -n $($(PKG)_BASENAME_RAW) \ - $($(PKG)_DL_OPTS) -endef +ifneq ($(call qstrip,$(BR2_PRIMARY_SITE)),) +DOWNLOAD_URIS += \ + -u $(call getschemeplusuri,$(BR2_PRIMARY_SITE),urlencode) +endif -define DOWNLOAD_CVS - $(EXTRA_ENV) $(DL_WRAPPER) -b cvs \ - -o $(DL_DIR)/$($(PKG)_SOURCE) \ - $(QUIET) \ - -- \ - -u $(call stripurischeme,$(call qstrip,$($(PKG)_SITE))) \ - -c $($(PKG)_DL_VERSION) \ - -N $($(PKG)_RAWNAME) \ - -n $($(PKG)_BASENAME_RAW) \ - $($(PKG)_DL_OPTS) -endef +ifeq ($(BR2_PRIMARY_SITE_ONLY),) +DOWNLOAD_URIS += \ + -u $($(PKG)_SITE_METHOD)+$(dir $(1)) +ifneq ($(call qstrip,$(BR2_BACKUP_SITE)),) +DOWNLOAD_URIS += \ + -u $(call getschemeplusuri,$(BR2_BACKUP_SITE),urlencode) +endif +endif -define DOWNLOAD_SVN - $(EXTRA_ENV) $(DL_WRAPPER) -b svn \ - -o $(DL_DIR)/$($(PKG)_SOURCE) \ - $(QUIET) \ - -- \ - -u $($(PKG)_SITE) \ +define DOWNLOAD + $(Q)$(if $(filter bzr cvs hg svn,$($(PKG)_SITE_METHOD)),BR_NO_CHECK_HASH_FOR=$(notdir $(1))) \ + $(EXTRA_ENV) $(DL_WRAPPER) \ -c $($(PKG)_DL_VERSION) \ - -n $($(PKG)_BASENAME_RAW) \ - $($(PKG)_DL_OPTS) -endef - -# SCP URIs should be of the form scp://[user@]host:filepath -# Note that filepath is relative to the user's home directory, so you may want -# to prepend the path with a slash: scp://[user@]host:/absolutepath -define DOWNLOAD_SCP - $(EXTRA_ENV) $(DL_WRAPPER) -b scp \ - -o $(DL_DIR)/$(2) \ + -f $(notdir $(1)) \ -H $(PKGDIR)/$($(PKG)_RAWNAME).hash \ - $(QUIET) \ - -- \ - -u '$(call stripurischeme,$(call qstrip,$(1)))' \ - $($(PKG)_DL_OPTS) -endef - -define DOWNLOAD_HG - $(EXTRA_ENV) $(DL_WRAPPER) -b hg \ - -o $(DL_DIR)/$($(PKG)_SOURCE) \ - $(QUIET) \ - -- \ - -u $($(PKG)_SITE) \ - -c $($(PKG)_DL_VERSION) \ -n $($(PKG)_BASENAME_RAW) \ - $($(PKG)_DL_OPTS) -endef - -define DOWNLOAD_WGET - $(EXTRA_ENV) $(DL_WRAPPER) -b wget \ - -o $(DL_DIR)/$(2) \ - -H $(PKGDIR)/$($(PKG)_RAWNAME).hash \ - $(QUIET) \ - -- \ - -u '$(call qstrip,$(1))' \ - $($(PKG)_DL_OPTS) -endef - -define DOWNLOAD_LOCALFILES - $(EXTRA_ENV) $(DL_WRAPPER) -b cp \ - -o $(DL_DIR)/$(2) \ - -H $(PKGDIR)/$($(PKG)_RAWNAME).hash \ + -N $($(PKG)_RAWNAME) \ + -o $(DL_DIR)/$(notdir $(1)) \ + $(if $($(PKG)_GIT_SUBMODULES),-r) \ + $(DOWNLOAD_URIS) \ $(QUIET) \ -- \ - -u $(call stripurischeme,$(call qstrip,$(1))) \ $($(PKG)_DL_OPTS) endef - -################################################################################ -# DOWNLOAD -- Download helper. Will try to download source from: -# 1) BR2_PRIMARY_SITE if enabled -# 2) Download site, unless BR2_PRIMARY_SITE_ONLY is set -# 3) BR2_BACKUP_SITE if enabled, unless BR2_PRIMARY_SITE_ONLY is set -# -# Argument 1 is the source location -# -# E.G. use like this: -# $(call DOWNLOAD,$(FOO_SITE)) -# -# For PRIMARY and BACKUP site, any ? 
in the URL is replaced by %3F. A ? in -# the URL is used to separate query arguments, but the PRIMARY and BACKUP -# sites serve just plain files. -################################################################################ - -define DOWNLOAD - $(call DOWNLOAD_INNER,$(1),$(notdir $(1)),DOWNLOAD) -endef - -define DOWNLOAD_INNER - $(Q)$(if $(filter bzr cvs hg svn,$($(PKG)_SITE_METHOD)),export BR_NO_CHECK_HASH_FOR=$(2);) \ - if test -n "$(call qstrip,$(BR2_PRIMARY_SITE))" ; then \ - case "$(call geturischeme,$(BR2_PRIMARY_SITE))" in \ - file) $(call $(3)_LOCALFILES,$(BR2_PRIMARY_SITE)/$(2),$(2)) && exit ;; \ - scp) $(call $(3)_SCP,$(BR2_PRIMARY_SITE)/$(2),$(2)) && exit ;; \ - *) $(call $(3)_WGET,$(BR2_PRIMARY_SITE)/$(subst ?,%3F,$(2)),$(2)) && exit ;; \ - esac ; \ - fi ; \ - if test "$(BR2_PRIMARY_SITE_ONLY)" = "y" ; then \ - exit 1 ; \ - fi ; \ - if test -n "$(1)" ; then \ - case "$($(PKG)_SITE_METHOD)" in \ - git) $($(3)_GIT) && exit ;; \ - svn) $($(3)_SVN) && exit ;; \ - cvs) $($(3)_CVS) && exit ;; \ - bzr) $($(3)_BZR) && exit ;; \ - file) $($(3)_LOCALFILES) && exit ;; \ - scp) $($(3)_SCP) && exit ;; \ - hg) $($(3)_HG) && exit ;; \ - *) $(call $(3)_WGET,$(1),$(2)) && exit ;; \ - esac ; \ - fi ; \ - if test -n "$(call qstrip,$(BR2_BACKUP_SITE))" ; then \ - $(call $(3)_WGET,$(BR2_BACKUP_SITE)/$(subst ?,%3F,$(2)),$(2)) && exit ; \ - fi ; \ - exit 1 -endef diff --git a/support/download/cvs b/support/download/cvs index 69d5c71f28..3f77b849e4 100755 --- a/support/download/cvs +++ b/support/download/cvs @@ -21,7 +21,7 @@ while getopts "${BR_BACKEND_DL_GETOPTS}" OPT; do case "${OPT}" in q) verbose=-Q;; o) output="${OPTARG}";; - u) uri="${OPTARG}";; + u) uri="${OPTARG#*://}";; c) rev="${OPTARG}";; N) rawname="${OPTARG}";; n) basename="${OPTARG}";; diff --git a/support/download/dl-wrapper b/support/download/dl-wrapper index abc51f637a..cd903f92d9 100755 --- a/support/download/dl-wrapper +++ b/support/download/dl-wrapper @@ -19,31 +19,34 @@ # We want to catch any unexpected failure, and exit immediately. set -e -export BR_BACKEND_DL_GETOPTS=":hc:o:n:N:H:ru:q" +export BR_BACKEND_DL_GETOPTS=":hc:o:n:N:H:ru:qf:e" main() { local OPT OPTARG local backend output hfile recurse quiet rc + local -a uris # Parse our options; anything after '--' is for the backend - while getopts :hb:o:H:rq OPT; do + while getopts ":hc:o:n:N:H:rf:u:q" OPT; do case "${OPT}" in h) help; exit 0;; - b) backend="${OPTARG}";; + c) cset="${OPTARG}";; o) output="${OPTARG}";; + n) raw_base_name="${OPTARG}";; + N) base_name="${OPTARG}";; H) hfile="${OPTARG}";; r) recurse="-r";; + f) filename="${OPTARG}";; + u) uris+=( "${OPTARG}" );; q) quiet="-q";; :) error "option '%s' expects a mandatory argument\n" "${OPTARG}";; \?) error "unknown option '%s'\n" "${OPTARG}";; esac done + # Forget our options, and keep only those for the backend shift $((OPTIND-1)) - if [ -z "${backend}" ]; then - error "no backend specified, use -b\n" - fi if [ -z "${output}" ]; then error "no output specified, use -o\n" fi @@ -66,48 +69,85 @@ main() { warn "Re-downloading '%s'...\n" "${output##*/}" fi - # tmpd is a temporary directory in which backends may store intermediate - # by-products of the download. - # tmpf is the file in which the backends should put the downloaded content. - # tmpd is located in $(BUILD_DIR), so as not to clutter the (precious) - # $(BR2_DL_DIR) - # We let the backends create tmpf, so they are able to set whatever - # permission bits they want (although we're only really interested in - # the executable bit.) 
-    tmpd="$(mktemp -d "${BUILD_DIR}/.${output##*/}.XXXXXX")"
-    tmpf="${tmpd}/output"
-
-    # Helpers expect to run in a directory that is *really* trashable, so
-    # they are free to create whatever files and/or sub-dirs they might need.
-    # Doing the 'cd' here rather than in all backends is easier.
-    cd "${tmpd}"
-
-    # If the backend fails, we can just remove the temporary directory to
-    # remove all the cruft it may have left behind. Then we just exit in
-    # error too.
-    if ! "${OLDPWD}/support/download/${backend}" \
-        ${quiet} ${recurse} \
-        -o "${tmpf}" "${@}"
-    then
-        rm -rf "${tmpd}"
-        exit 1
-    fi
-
-    # cd back to free the temp-dir, so we can remove it later
-    cd "${OLDPWD}"
-
-    # Check if the downloaded file is sane, and matches the stored hashes
-    # for that file
-    if support/download/check-hash ${quiet} "${hfile}" "${tmpf}" "${output##*/}"; then
-        rc=0
-    else
-        if [ ${?} -ne 3 ]; then
+    # Look through all the URIs that we were given to download the package
+    # source
+    download_and_check=0
+    rc=1
+    for uri in "${uris[@]}"; do
+        backend=${uri%+*}
+        urlencode=${backend#*|}
+        # urlencode must be "urlencode"
+        [ "${urlencode}" != "urlencode" ] && urlencode=""
+        backend=${backend%|*}
+        case "${backend}" in
+            git|svn|cvs|bzr|file|scp|hg) ;;
+            *) backend="wget" ;;
+        esac
+        uri=${uri#*+}
+
+        # tmpd is a temporary directory in which backends may store
+        # intermediate by-products of the download.
+        # tmpf is the file in which the backends should put the downloaded
+        # content.
+        # tmpd is located in $(BUILD_DIR), so as not to clutter the (precious)
+        # $(BR2_DL_DIR)
+        # We let the backends create tmpf, so they are able to set whatever
+        # permission bits they want (although we're only really interested in
+        # the executable bit.)
+        tmpd="$(mktemp -d "${BUILD_DIR}/.${output##*/}.XXXXXX")"
+        tmpf="${tmpd}/output"
+
+        # Helpers expect to run in a directory that is *really* trashable, so
+        # they are free to create whatever files and/or sub-dirs they might need.
+        # Doing the 'cd' here rather than in all backends is easier.
+        cd "${tmpd}"
+
+        # If the backend fails, we can just remove the content of the temporary
+        # directory to remove all the cruft it may have left behind, and try
+        # the next URI until one succeeds. Once we run out of URIs to try, we
+        # need to clean up and exit.
+        if ! "${OLDPWD}/support/download/${backend}" \
+            $([ -n "${urlencode}" ] && printf %s '-e') \
+            -c "${cset}" \
+            -n "${raw_base_name}" \
+            -N "${base_name}" \
+            -f "${filename}" \
+            -u "${uri}" \
+            -o "${tmpf}" \
+            ${quiet} ${recurse} "${@}"
+        then
+            # cd back to keep path coherence
+            cd "${OLDPWD}"
+            rm -rf "${tmpd}"
+            continue
+        fi
+
+        # cd back to free the temp-dir, so we can remove it later
+        cd "${OLDPWD}"
+
+        # Check if the downloaded file is sane, and matches the stored hashes
+        # for that file
+        if support/download/check-hash ${quiet} "${hfile}" "${tmpf}" "${output##*/}"; then
+            rc=0
+        else
+            if [ ${?} -ne 3 ]; then
+                rm -rf "${tmpd}"
-                exit 1
+                continue
+            fi
+
+            # the hash file exists and there was no hash to check the file
+            # against
+            rc=1
         fi
+        download_and_check=1
+        break
+    done
 
-    # the hash file exists and there was no hash to check the file against
-    rc=1
+    # We tried every URI we were given; none worked or matched the
+    # available hash. *ABORT MISSION*
+    if [ "${download_and_check}" -eq 0 ]; then
+        rm -rf "${tmpd}"
+        exit 1
     fi
 
     # tmp_output is in the same directory as the final output, so we can
@@ -173,16 +213,13 @@ DESCRIPTION
 
     -h  This help text.
 
-    -b BACKEND
-        Wrap the specified BACKEND. Known backends are:
-            bzr     Bazaar
-            cp      Local files
-            cvs     Concurrent Versions System
-            git     Git
-            hg      Mercurial
-            scp     Secure copy
-            svn     Subversion
-            wget    HTTP download
+    -u URIs
+        The URI to get the file from; it must follow the format given in
+        the example below.
+        You may give as many '-u URI' options as you want; the script will
+        stop at the first successful download.
+
+        Example: backend+URI; git+http://example.com or http+http://example.com
 
     -o FILE
         Store the downloaded archive in FILE.
 
diff --git a/support/download/wget b/support/download/wget
index fece6663ca..c69e6071aa 100755
--- a/support/download/wget
+++ b/support/download/wget
@@ -8,7 +8,9 @@ set -e
 # Options:
 #   -q          Be quiet.
 #   -o FILE     Save into file FILE.
+#   -f FILENAME The filename of the tarball to get at URL.
 #   -u URL      Download file at URL.
+#   -e          URL-encode the filename passed to wget ('?' becomes '%3F').
 #
 # Environment:
 #   WGET     : the wget command to call
@@ -18,7 +20,9 @@ while getopts "${BR_BACKEND_DL_GETOPTS}" OPT; do
     case "${OPT}" in
     q)  verbose=-q;;
     o)  output="${OPTARG}";;
+    f)  filename="${OPTARG}";;
    u)  url="${OPTARG}";;
+    e)  encode="-e";;
    :)  printf "option '%s' expects a mandatory argument\n" "${OPTARG}"; exit 1;;
    \?) printf "unknown option '%s'\n" "${OPTARG}" >&2; exit 1;;
    esac
@@ -32,4 +36,8 @@ _wget() {
     eval ${WGET} "${@}"
 }
 
-_wget ${verbose} "${@}" -O "'${output}'" "'${url}'"
+# Replace every '?' with '%3F' in the filename; only for the PRIMARY and BACKUP
+# mirrors
+[ -n "${encode}" ] && filename=${filename//\?/%3F}
+
+_wget ${verbose} "${@}" -O "'${output}'" "'${url}/${filename}'"
-- 
2.30.2
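
Illustration (editor's sketch, not part of the patch): the standalone loop
below mirrors how the new dl-wrapper decomposes the "backend[|urlencode]+URI"
strings that pkg-download.mk builds with getschemeplusuri. All host names and
URLs are made-up example values.

#!/bin/sh
# Decompose scheme-plus-URI strings the way the dl-wrapper loop does.
# The three example URIs mimic, in order, BR2_PRIMARY_SITE (with the
# urlencode flag), the package's own site (git), and BR2_BACKUP_SITE.
for uri in \
    "http|urlencode+http://primary.example.com/mirror" \
    "git+https://git.example.com/foo.git" \
    "http|urlencode+http://backup.example.com/mirror"; do
    backend=${uri%+*}           # "http|urlencode" or "git"
    urlencode=${backend#*|}     # "urlencode" when the flag is present
    [ "${urlencode}" != "urlencode" ] && urlencode=""
    backend=${backend%|*}       # strip the flag, keeping only the scheme
    case "${backend}" in
    git|svn|cvs|bzr|file|scp|hg) ;;
    *) backend="wget" ;;        # plain http/https/ftp go through wget
    esac
    printf 'backend=%s urlencode=%s uri=%s\n' \
        "${backend}" "${urlencode:-no}" "${uri#*+}"
done

The wrapper tries the URIs in exactly this order -- primary mirror, upstream
site, backup mirror -- and stops at the first one that both downloads and
passes the hash check, which is the retry loop this patch introduces.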