o) output="${OPTARG}";;
u) uri="${OPTARG}";;
c) cset="${OPTARG}";;
+ d) dl_dir="${OPTARG}";;
n) basename="${OPTARG}";;
:) printf "option '%s' expects a mandatory argument\n" "${OPTARG}"; exit 1;;
\?) printf "unknown option '%s'\n" "${OPTARG}" >&2; exit 1;;
eval ${GIT} "${@}"
}
-# Try a shallow clone, since it is faster than a full clone - but that only
-# works if the version is a ref (tag or branch). Before trying to do a shallow
-# clone we check if ${cset} is in the list provided by git ls-remote. If not
-# we fall back on a full clone.
+# Check whether a cache of the git clone of this repository already exists.
+git_cache="${dl_dir}/git"
+
+# If the cache directory doesn't exist, initialise a new repository, which
+# will be fetched later.
+if [ ! -d "${git_cache}" ]; then
+ _git init "'${git_cache}'"
+ _git -C "'${git_cache}'" remote add origin "'${uri}'"
+fi
+
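+# Everything below, up to the final popd, runs from inside the cache directory.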
+pushd "${git_cache}" >/dev/null
+
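+# The remote URI may have changed since the cache was first created, so make
+# sure origin points at the URI we were given.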
+_git remote set-url origin "'${uri}'"
+
+# Try to fetch with a limited depth, since it is faster than a full fetch -
+# but that only works if the version is a ref (tag or branch). Before trying
+# the shallow fetch, we check whether ${cset} is in the list provided by
+# git ls-remote. If it is not, we fall back to a full fetch.
#
-# Messages for the type of clone used are provided to ease debugging in case of
-# problems
+# Messages for the type of fetch used are provided to ease debugging in
+# case of problems
git_done=0
-if [ -n "$(_git ls-remote "'${uri}'" "'${cset}'" 2>&1)" ]; then
- printf "Doing shallow clone\n"
- if _git clone ${verbose} "${@}" --depth 1 -b "'${cset}'" "'${uri}'" "'${basename}'"; then
+if [ -n "$(_git ls-remote origin "'${cset}'" 2>&1)" ]; then
+ printf "Doing a shallow fetch\n"
+ if _git fetch "${@}" --depth 1 origin "'${cset}'"; then
git_done=1
else
- printf "Shallow clone failed, falling back to doing a full clone\n"
+ printf "Shallow fetch failed, falling back to fetching all refs\n"
fi
fi
if [ ${git_done} -eq 0 ]; then
- printf "Doing full clone\n"
- _git clone ${verbose} "${@}" "'${uri}'" "'${basename}'"
+ printf "Fetching all references\n"
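+    # Also fetch all tags (-t), so that a cset pointing to a tag that is not
+    # reachable from any branch is still available locally.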
+ _git fetch origin -t
fi
-pushd "${basename}" >/dev/null
-
# Try to get the special refs exposed by some forges (pull-requests for
# github, changes for gerrit...). There is no easy way to know whether
# the cset the user passed us is such a special ref or a tag or a sha1
_git submodule update --init --recursive
fi
-# We do not want the .git dir; we keep other .git files, in case they
-# are the only files in their directory.
+# Generate the archive; sort the file list with the C locale so that it is
+# reproducible.
+# We do not want the .git dir; we keep other .git files, in case they are the
+# only files in their directory.
# The .git dir would generate non reproducible tarballs as it depends on
# the state of the remote server. It also would generate large tarballs
# (gigabytes for some linux trees) when a full clone took place.
-rm -rf .git
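+# Note that the .git directory is no longer removed: it now holds the download
+# cache and must be preserved across runs, so it is simply excluded from the
+# file list instead.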
+find . -not -type d \
+ -and -not -path "./.git/*" >"${output}.list"
+LC_ALL=C sort <"${output}.list" >"${output}.list.sorted"
-popd >/dev/null
-
-# Generate the archive, sort with the C locale so that it is reproducible
-find "${basename}" -not -type d >"${basename}.list"
-LC_ALL=C sort <"${basename}.list" >"${basename}.list.sorted"
# Create GNU-format tarballs, since that's the format of the tarballs on
# sources.buildroot.org and used in the *.hash files
-tar cf - --numeric-owner --owner=0 --group=0 --mtime="${date}" --format=gnu \
- -T "${basename}.list.sorted" >"${output}.tar"
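+# The --transform expression prefixes every "./path" entry with the package
+# basename, so that the archive extracts into a single top-level directory.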
+tar cf - --transform="s#^\./#${basename}/#" \
+ --numeric-owner --owner=0 --group=0 --mtime="${date}" --format=gnu \
+ -T "${output}.list.sorted" >"${output}.tar"
gzip -6 -n <"${output}.tar" >"${output}"
+
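+# The intermediate file lists are no longer needed.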
+rm -f "${output}.list"
+rm -f "${output}.list.sorted"
+
+popd >/dev/null