kumquat-buildroot/package/pkg-download.mk
commit ea55e13236 by Yann E. MORIN: core/pkg-infra: don't enforce site-method for extra downloads
The site method only ever applies to the main download, while extra
downloads are always to be fetched with wget.

However, the site method is prepended to the URL from within the
DOWNLOAD macro (well, a variable evaluated in the DOWNLOAD macro),
which is called for each download of a package, thus effectively
prepending the site method to all downloads, even the extra ones (and
the patches).

We fix that by prepending the site method from within the
generic-package infra, so that it only applies to the main download.

For that, we move the main _SOURCE out of the foreach loop, so that
we can prepend the site-method to it, without impacting the other
downloads.
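
As a rough, illustrative sketch of the shape this takes in the generic-package
infra (hypothetical; variable names are assumed and the exact code in
pkg-generic.mk may differ), the main _SOURCE gets the site method prepended
while patches and extra downloads keep plain URLs:

	# Hypothetical sketch only: handle the main _SOURCE separately so that
	# only it is prefixed with the site method; patches and extra downloads
	# stay wget-able URLs.
	$(2)_ALL_DOWNLOADS = \
		$(if $($(2)_SOURCE),$($(2)_SITE_METHOD)+$($(2)_SITE)/$($(2)_SOURCE)) \
		$(foreach p,$($(2)_PATCH) $($(2)_EXTRA_DOWNLOADS),\
			$(if $(findstring ://,$(p)),$(p),$($(2)_SITE)/$(p)))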

Reported-by: Luca Ceresoli <luca@lucaceresoli.net>
Signed-off-by: "Yann E. MORIN" <yann.morin.1998@free.fr>
Cc: Luca Ceresoli <luca@lucaceresoli.net>
Cc: Thomas Petazzoni <thomas.petazzoni@bootlin.com>
Cc: Maxime Hadjinlian <maxime.hadjinlian@gmail.com>
Reviewed-by: Luca Ceresoli <luca@lucaceresoli.net>
Tested-by: Luca Ceresoli <luca@lucaceresoli.net>
Signed-off-by: Thomas Petazzoni <thomas.petazzoni@bootlin.com>
2018-05-01 23:00:16 +02:00


################################################################################
#
# This file contains the download helpers for the various package
# infrastructures. It is used to handle downloads from HTTP servers,
# FTP servers, Git repositories, Subversion repositories, Mercurial
# repositories, Bazaar repositories, and SCP servers.
#
################################################################################

# Download method commands
export WGET := $(call qstrip,$(BR2_WGET))
export SVN := $(call qstrip,$(BR2_SVN))
export CVS := $(call qstrip,$(BR2_CVS))
export BZR := $(call qstrip,$(BR2_BZR))
export GIT := $(call qstrip,$(BR2_GIT))
export HG := $(call qstrip,$(BR2_HG))
export SCP := $(call qstrip,$(BR2_SCP))
SSH := $(call qstrip,$(BR2_SSH))
export LOCALFILES := $(call qstrip,$(BR2_LOCALFILES))

DL_WRAPPER = support/download/dl-wrapper

FLOCK = flock $($(PKG)_DL_DIR)/

# DL_DIR may have been set already from the environment
ifeq ($(origin DL_DIR),undefined)
DL_DIR ?= $(call qstrip,$(BR2_DL_DIR))
ifeq ($(DL_DIR),)
DL_DIR := $(TOPDIR)/dl
endif
else
# Restore the BR2_DL_DIR that was overridden by the .config file
BR2_DL_DIR = $(DL_DIR)
endif

# ensure it exists and is an absolute path
DL_DIR := $(shell mkdir -p $(DL_DIR) && cd $(DL_DIR) >/dev/null && pwd)
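
# For illustration only (hypothetical path): since DL_DIR honours the
# environment, a single download directory can be shared across several
# Buildroot trees by exporting it before calling make, e.g.:
#   $ DL_DIR=/opt/buildroot-dl make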
#
# URI scheme helper functions
# Example URIs:
# * http://www.example.com/dir/file
# * scp://www.example.com:dir/file (with domainseparator :)
#
# geturischeme: http
geturischeme = $(firstword $(subst ://, ,$(call qstrip,$(1))))
# getschemeplusuri: git|parameter+http://example.com
getschemeplusuri = $(call geturischeme,$(1))$(if $(2),\|$(2))+$(1)
# stripurischeme: www.example.com/dir/file
stripurischeme = $(lastword $(subst ://, ,$(call qstrip,$(1))))
# domain: www.example.com
domain = $(firstword $(subst $(call domainseparator,$(2)), ,$(call stripurischeme,$(1))))
# notdomain: dir/file
notdomain = $(patsubst $(call domain,$(1),$(2))$(call domainseparator,$(2))%,%,$(call stripurischeme,$(1)))
#
# default domainseparator is /, specify alternative value as first argument
domainseparator = $(if $(1),$(1),/)
# github(user,package,version): returns site of GitHub repository
github = https://github.com/$(1)/$(2)/archive/$(3)
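
# For illustration, a few expansions of the helpers above (the URIs are the
# example ones given earlier; the github user/package/version values are
# hypothetical):
#   $(call geturischeme,http://www.example.com/dir/file)   -> http
#   $(call getschemeplusuri,http://www.example.com/dir/file,urlencode)
#       -> http\|urlencode+http://www.example.com/dir/file
#       (the backslash keeps the '|' from being interpreted by the shell)
#   $(call domain,scp://www.example.com:dir/file,:)        -> www.example.com
#   $(call notdomain,scp://www.example.com:dir/file,:)     -> dir/file
#   $(call github,foo,bar,v1.0) -> https://github.com/foo/bar/archive/v1.0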

# Expressly do not check hashes for these files.
# Exported variables default to being immediately expanded in some versions
# of make, but we need this one to be recursively expanded, so assign it
# explicitly.
export BR_NO_CHECK_HASH_FOR =
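
# For illustration only (hypothetical package and file name): a caller can
# exempt a given file from hash checking with a target-specific append; since
# the variable is exported, the download infrastructure then sees it in its
# environment, e.g.:
#   foo-legal-source: BR_NO_CHECK_HASH_FOR += foo-1.0.tar.gz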
################################################################################
# DOWNLOAD -- Download helper. Will call DL_WRAPPER which will try to download
# source from:
# 1) BR2_PRIMARY_SITE if enabled
# 2) Download site, unless BR2_PRIMARY_SITE_ONLY is set
# 3) BR2_BACKUP_SITE if enabled, unless BR2_PRIMARY_SITE_ONLY is set
#
# Argument 1 is the source location
#
################################################################################

ifneq ($(call qstrip,$(BR2_PRIMARY_SITE)),)
DOWNLOAD_URIS += \
	-u $(call getschemeplusuri,$(call qstrip,$(BR2_PRIMARY_SITE)/$($(PKG)_DL_SUBDIR)),urlencode) \
	-u $(call getschemeplusuri,$(call qstrip,$(BR2_PRIMARY_SITE)),urlencode)
endif

ifeq ($(BR2_PRIMARY_SITE_ONLY),)
DOWNLOAD_URIS += \
	-u $(patsubst %/,%,$(dir $(call qstrip,$(1))))
ifneq ($(call qstrip,$(BR2_BACKUP_SITE)),)
DOWNLOAD_URIS += \
	-u $(call getschemeplusuri,$(call qstrip,$(BR2_BACKUP_SITE)/$($(PKG)_DL_SUBDIR)),urlencode) \
	-u $(call getschemeplusuri,$(call qstrip,$(BR2_BACKUP_SITE)),urlencode)
endif
endif

define DOWNLOAD
	$(Q)mkdir -p $($(PKG)_DL_DIR)
	$(EXTRA_ENV) $(FLOCK) $(DL_WRAPPER) \
		-c '$($(PKG)_DL_VERSION)' \
		-d '$($(PKG)_DL_DIR)' \
		-D '$(DL_DIR)' \
		-f '$(notdir $(1))' \
		-H '$(PKGDIR)/$($(PKG)_RAWNAME).hash' \
		-n '$($(PKG)_BASENAME_RAW)' \
		-N '$($(PKG)_RAWNAME)' \
		-o '$($(PKG)_DL_DIR)/$(notdir $(1))' \
		$(if $($(PKG)_GIT_SUBMODULES),-r) \
		$(DOWNLOAD_URIS) \
		$(QUIET) \
		-- \
		$($(PKG)_DL_OPTS)
endef
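
# Illustrative use only (hypothetical values): real call sites live in the
# package infrastructures and run inside a recipe with PKG set to the
# upper-case package name; argument 1 is the source location, e.g.:
#   $(call DOWNLOAD,http://example.com/foo/foo-1.0.tar.gz)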