448d1d1e69
For packages that use a version control repository rather than a pre-made tarball, the directory prefix used inside the tarball is currently FOO_BASE_NAME, which can be 'foo' or 'host-foo'. This means that the hash of such a tarball will be different for target and host packages, even though the contents are exactly the same. Hence, if the hash file is created based on 'foo', and later a fresh build is made where 'host-foo' happens to be built before 'foo' (with a different config, for example), the hash will be detected as incorrect and a new download is started. This problem does not affect many packages/users, due to the number of conditions to be met: - the package should be available for target _and_ host - the package needs to use a VCS download method, e.g. git, hg, svn, ... This does not include standard github downloads, which download a pre-made archive. - there should be a hash file containing the hash of the downloaded archive. Since normally there is no hash file for packages with sources coming from a version control system, this restricts even further. Some examples of packages in this category that do have a hash file (but not necessarily match the earlier conditions): expedite, vexpress-firmware, squashfs, ... - the archive needs to be stored in a 'primary site' after initial archiving and thus be downloaded later using a non-version-controlled method, like wget or scp. This is because the version control download methods do not receive a '-H' parameter pointing to the hash file and thus no hashes are checked at all even if the file is present. While packages matching the third condition could be considered to be 'wrong' and need to be fixed, it does actually make sense to have a hash file for packages from version control, in particular if they are stored in a primary site as mentioned in the last condition. 
Regardless of any different opinions on the previous paragraph, it is also not conceptually correct that a tarball of a package source can contain a Buildroot-specific directory prefix 'host-'. Therefore, use FOO_RAW_BASE_NAME instead of FOO_BASE_NAME when calling the dl-wrapper. Example test scenario that exhibits the problem: $ rm -rf /tmp/dl dl/squashfs-9c1db6d13a51a2e009f0027ef336ce03624eac0d.tar.gz $ make qemu_x86_64_defconfig $ make host-squashfs-dirclean host-squashfs-source $ mkdir /tmp/dl $ mv dl/squashfs-9c1db6d13a51a2e009f0027ef336ce03624eac0d.tar.gz /tmp/dl/ $ sed -i -e 's,BR2_PRIMARY_SITE=.*,BR2_PRIMARY_SITE="file:///tmp/dl",' \ -e '/BR2_PRIMARY_SITE/aBR2_PRIMARY_SITE_ONLY=y' .config $ make host-squashfs-dirclean host-squashfs-source Signed-off-by: Thomas De Schampheleire <thomas.de.schampheleire@gmail.com> Reviewed-by: Arnout Vandecappelle (Essensium/Mind) <arnout@mind.be> Signed-off-by: Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
252 lines
7.5 KiB
Makefile
252 lines
7.5 KiB
Makefile
################################################################################
|
|
#
|
|
# This file contains the download helpers for the various package
|
|
# infrastructures. It is used to handle downloads from HTTP servers,
|
|
# FTP servers, Git repositories, Subversion repositories, Mercurial
|
|
# repositories, Bazaar repositories, and SCP servers.
|
|
#
|
|
################################################################################
|
|
|
|
# Download method commands, taken from the Buildroot configuration
# (BR2_* string options); qstrip removes the surrounding quotes from
# the Kconfig values. All but SSH are exported so that the dl-wrapper
# and its backend helper scripts can use them.
export WGET := $(call qstrip,$(BR2_WGET))
export SVN := $(call qstrip,$(BR2_SVN))
export CVS := $(call qstrip,$(BR2_CVS))
export BZR := $(call qstrip,$(BR2_BZR))
export GIT := $(call qstrip,$(BR2_GIT))
export HG := $(call qstrip,$(BR2_HG))
export SCP := $(call qstrip,$(BR2_SCP))
# SSH is only used directly in this file (SOURCE_CHECK_SCP), so it is
# not exported.
SSH := $(call qstrip,$(BR2_SSH))
export LOCALFILES := $(call qstrip,$(BR2_LOCALFILES))
|
|
|
|
# Wrapper script through which all downloads below are funneled; it
# dispatches to the per-method backend selected with -b and handles
# hash checking when a hash file is passed with -H.
DL_WRAPPER = support/download/dl-wrapper

# DL_DIR may have been set already from the environment
ifeq ($(origin DL_DIR),undefined)
DL_DIR ?= $(call qstrip,$(BR2_DL_DIR))
ifeq ($(DL_DIR),)
DL_DIR := $(TOPDIR)/dl
endif
else
# Restore the BR2_DL_DIR that was overridden by the .config file
BR2_DL_DIR = $(DL_DIR)
endif

# Ensure the directory exists and make DL_DIR an absolute path
DL_DIR := $(shell mkdir -p $(DL_DIR) && cd $(DL_DIR) >/dev/null && pwd)
|
|
|
|
#
# URI scheme helper functions. $(1) is the (possibly quoted) URI;
# $(2), where accepted, is an alternative domain separator.
# Example URIs:
# * http://www.example.com/dir/file
# * scp://www.example.com:dir/file (with domainseparator :)
#
# geturischeme: http
geturischeme = $(firstword $(subst ://, ,$(call qstrip,$(1))))
# stripurischeme: www.example.com/dir/file
stripurischeme = $(lastword $(subst ://, ,$(call qstrip,$(1))))
# domain: www.example.com
domain = $(firstword $(subst $(call domainseparator,$(2)), ,$(call stripurischeme,$(1))))
# notdomain: dir/file
notdomain = $(patsubst $(call domain,$(1),$(2))$(call domainseparator,$(2))%,%,$(call stripurischeme,$(1)))
#
# default domainseparator is /, specify alternative value as first argument
domainseparator = $(if $(1),$(1),/)
|
|
|
|
# github(user,package,version): returns site of GitHub repository,
# i.e. the URL of the pre-made archive GitHub serves for that version.
github = https://github.com/$(1)/$(2)/archive/$(3)
|
|
|
|
# Expressly do not check hashes for those files.
# Exported variables default to immediately expanded in some versions of
# make, but we need it to be recursively-expanded, so explicitly assign it.
export BR_NO_CHECK_HASH_FOR =
|
|
|
|
################################################################################
# The DOWNLOAD_* helpers are in charge of getting a working copy
# of the source repository for their corresponding SCM,
# checking out the requested version / commit / tag, and creating an
# archive out of it. DOWNLOAD_SCP uses scp to obtain a remote file with
# ssh authentication. DOWNLOAD_WGET is the normal wget-based download
# mechanism.
#
# The SOURCE_CHECK_* helpers are in charge of simply checking that the source
# is available for download. This can be used to make sure one will be able
# to get all the sources needed for one's build configuration.
################################################################################
|
|
|
|
# Archive the requested version from a Git repository into
# $(DL_DIR)/$(FOO_SOURCE). The raw base name (no 'host-' prefix) is
# passed so the directory prefix inside the tarball is identical for
# target and host packages. -r asks the backend for submodules when the
# package sets FOO_GIT_SUBMODULES.
define DOWNLOAD_GIT
	$(EXTRA_ENV) $(DL_WRAPPER) -b git \
		-o $(DL_DIR)/$($(PKG)_SOURCE) \
		$(if $($(PKG)_GIT_SUBMODULES),-r) \
		$(QUIET) \
		-- \
		$($(PKG)_SITE) \
		$($(PKG)_DL_VERSION) \
		$($(PKG)_RAW_BASE_NAME) \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# TODO: improve to check that the given PKG_DL_VERSION exists on the remote
# repository
# For now this only verifies that the remote repository is reachable.
define SOURCE_CHECK_GIT
	$(GIT) ls-remote --heads $($(PKG)_SITE) > /dev/null
endef
|
|
|
|
# Archive the requested version from a Bazaar repository into
# $(DL_DIR)/$(FOO_SOURCE), using the raw base name as the directory
# prefix inside the tarball (identical for target and host packages).
define DOWNLOAD_BZR
	$(EXTRA_ENV) $(DL_WRAPPER) -b bzr \
		-o $(DL_DIR)/$($(PKG)_SOURCE) \
		$(QUIET) \
		-- \
		$($(PKG)_SITE) \
		$($(PKG)_DL_VERSION) \
		$($(PKG)_RAW_BASE_NAME) \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# Check that the Bazaar repository is reachable (does not validate the
# requested version).
define SOURCE_CHECK_BZR
	$(BZR) ls --quiet $($(PKG)_SITE) > /dev/null
endef
|
|
|
|
# Archive the requested version from a CVS repository into
# $(DL_DIR)/$(FOO_SOURCE). Positional arguments to the backend: site
# (URI scheme stripped), version, RAWNAME (presumably the upstream CVS
# module name — confirm against the cvs backend script), and the raw
# base name used as the tarball's directory prefix.
define DOWNLOAD_CVS
	$(EXTRA_ENV) $(DL_WRAPPER) -b cvs \
		-o $(DL_DIR)/$($(PKG)_SOURCE) \
		$(QUIET) \
		-- \
		$(call stripurischeme,$(call qstrip,$($(PKG)_SITE))) \
		$($(PKG)_DL_VERSION) \
		$($(PKG)_RAWNAME) \
		$($(PKG)_RAW_BASE_NAME) \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# Not all CVS servers support ls/rls, use login to see if we can connect
define SOURCE_CHECK_CVS
	$(CVS) -d:pserver:anonymous:@$(call stripurischeme,$(call qstrip,$($(PKG)_SITE))) login
endef
|
|
|
|
# Archive the requested revision from a Subversion repository into
# $(DL_DIR)/$(FOO_SOURCE), using the raw base name as the directory
# prefix inside the tarball (identical for target and host packages).
define DOWNLOAD_SVN
	$(EXTRA_ENV) $(DL_WRAPPER) -b svn \
		-o $(DL_DIR)/$($(PKG)_SOURCE) \
		$(QUIET) \
		-- \
		$($(PKG)_SITE) \
		$($(PKG)_DL_VERSION) \
		$($(PKG)_RAW_BASE_NAME) \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# Check that the requested revision exists on the Subversion server
# (peg revision syntax URL@REV).
define SOURCE_CHECK_SVN
	$(SVN) ls $($(PKG)_SITE)@$($(PKG)_DL_VERSION) > /dev/null
endef
|
|
|
|
# SCP URIs should be of the form scp://[user@]host:filepath
# Note that filepath is relative to the user's home directory, so you may want
# to prepend the path with a slash: scp://[user@]host:/absolutepath
#
# Argument 1 is the full remote URI, argument 2 the local file name.
# -H points the wrapper at the package's hash file so the downloaded
# file can be verified.
define DOWNLOAD_SCP
	$(EXTRA_ENV) $(DL_WRAPPER) -b scp \
		-o $(DL_DIR)/$(2) \
		-H $(PKGDIR)/$($(PKG)_RAWNAME).hash \
		$(QUIET) \
		-- \
		'$(call stripurischeme,$(call qstrip,$(1)))' \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# Check that the remote file exists, by listing it over ssh.
# The scp URI in $(1) uses ':' as the host/path separator.
define SOURCE_CHECK_SCP
	$(SSH) $(call domain,$(1),:) ls '$(call notdomain,$(1),:)' > /dev/null
endef
|
|
|
|
# Archive the requested version from a Mercurial repository into
# $(DL_DIR)/$(FOO_SOURCE), using the raw base name as the directory
# prefix inside the tarball (identical for target and host packages).
define DOWNLOAD_HG
	$(EXTRA_ENV) $(DL_WRAPPER) -b hg \
		-o $(DL_DIR)/$($(PKG)_SOURCE) \
		$(QUIET) \
		-- \
		$($(PKG)_SITE) \
		$($(PKG)_DL_VERSION) \
		$($(PKG)_RAW_BASE_NAME) \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# TODO: improve to check that the given PKG_DL_VERSION exists on the remote
# repository
# For now this only verifies that the remote repository is reachable.
define SOURCE_CHECK_HG
	$(HG) incoming --force -l1 $($(PKG)_SITE) > /dev/null
endef
|
|
|
|
# Download a plain file over HTTP/FTP. Argument 1 is the full URI,
# argument 2 the local file name. -H points the wrapper at the
# package's hash file so the downloaded file can be verified.
define DOWNLOAD_WGET
	$(EXTRA_ENV) $(DL_WRAPPER) -b wget \
		-o $(DL_DIR)/$(2) \
		-H $(PKGDIR)/$($(PKG)_RAWNAME).hash \
		$(QUIET) \
		-- \
		'$(call qstrip,$(1))' \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# Check that the remote file exists without downloading it.
define SOURCE_CHECK_WGET
	$(WGET) --spider '$(call qstrip,$(1))'
endef
|
|
|
|
# Copy a file from a local path (file:// URI in argument 1) into
# $(DL_DIR)/$(2) through the cp backend. -H points the wrapper at the
# package's hash file so the copied file can be verified.
define DOWNLOAD_LOCALFILES
	$(EXTRA_ENV) $(DL_WRAPPER) -b cp \
		-o $(DL_DIR)/$(2) \
		-H $(PKGDIR)/$($(PKG)_RAWNAME).hash \
		$(QUIET) \
		-- \
		$(call stripurischeme,$(call qstrip,$(1))) \
		$($(PKG)_DL_OPTS)
endef
|
|
|
|
# Check that the local file referenced by the file:// URI exists.
define SOURCE_CHECK_LOCALFILES
	test -e $(call stripurischeme,$(call qstrip,$(1)))
endef
|
|
|
|
################################################################################
|
|
# DOWNLOAD -- Download helper. Will try to download source from:
|
|
# 1) BR2_PRIMARY_SITE if enabled
|
|
# 2) Download site, unless BR2_PRIMARY_SITE_ONLY is set
|
|
# 3) BR2_BACKUP_SITE if enabled, unless BR2_PRIMARY_SITE_ONLY is set
|
|
#
|
|
# Argument 1 is the source location
|
|
#
|
|
# E.G. use like this:
|
|
# $(call DOWNLOAD,$(FOO_SITE))
|
|
################################################################################
|
|
|
|
# Public entry point: download $(1), storing it locally under its
# basename; see the DOWNLOAD comment block above for the site fallback
# order.
define DOWNLOAD
	$(call DOWNLOAD_INNER,$(1),$(notdir $(1)),DOWNLOAD)
endef
|
|
|
|
# Public entry point: check that $(1) is available for download,
# without downloading it, using the same site fallback order as
# DOWNLOAD but dispatching to the SOURCE_CHECK_* helpers.
define SOURCE_CHECK
	$(call DOWNLOAD_INNER,$(1),$(notdir $(1)),SOURCE_CHECK)
endef
|
|
|
|
# Shared implementation behind DOWNLOAD and SOURCE_CHECK.
#
# Argument 1 is the source location, argument 2 the local file name,
# argument 3 the helper prefix to dispatch to ("DOWNLOAD" or
# "SOURCE_CHECK").
#
# Sites are tried in order: BR2_PRIMARY_SITE (if set), the package's
# own site (skipped when BR2_PRIMARY_SITE_ONLY=y), then BR2_BACKUP_SITE
# (if set); the first success exits, otherwise the recipe fails.
#
# For VCS site methods the file name is put in BR_NO_CHECK_HASH_FOR so
# the wrapper skips hash checking for it (see the comment on that
# variable above).
#
# Note: in the site-method case statements, $($(3)_GIT) etc. are plain
# variable expansions (not $(call ...)), so the $(1)/$(2) references
# inside those defines resolve to DOWNLOAD_INNER's own arguments.
define DOWNLOAD_INNER
	$(Q)$(if $(filter bzr cvs git hg svn,$($(PKG)_SITE_METHOD)),export BR_NO_CHECK_HASH_FOR=$(2);) \
	if test -n "$(call qstrip,$(BR2_PRIMARY_SITE))" ; then \
		case "$(call geturischeme,$(BR2_PRIMARY_SITE))" in \
			file) $(call $(3)_LOCALFILES,$(BR2_PRIMARY_SITE)/$(2),$(2)) && exit ;; \
			scp) $(call $(3)_SCP,$(BR2_PRIMARY_SITE)/$(2),$(2)) && exit ;; \
			*) $(call $(3)_WGET,$(BR2_PRIMARY_SITE)/$(2),$(2)) && exit ;; \
		esac ; \
	fi ; \
	if test "$(BR2_PRIMARY_SITE_ONLY)" = "y" ; then \
		exit 1 ; \
	fi ; \
	if test -n "$(1)" ; then \
		case "$($(PKG)_SITE_METHOD)" in \
			git) $($(3)_GIT) && exit ;; \
			svn) $($(3)_SVN) && exit ;; \
			cvs) $($(3)_CVS) && exit ;; \
			bzr) $($(3)_BZR) && exit ;; \
			file) $($(3)_LOCALFILES) && exit ;; \
			scp) $($(3)_SCP) && exit ;; \
			hg) $($(3)_HG) && exit ;; \
			*) $(call $(3)_WGET,$(1),$(2)) && exit ;; \
		esac ; \
	fi ; \
	if test -n "$(call qstrip,$(BR2_BACKUP_SITE))" ; then \
		$(call $(3)_WGET,$(BR2_BACKUP_SITE)/$(2),$(2)) && exit ; \
	fi ; \
	exit 1
endef
|