-rw-r--r--  meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb | 18
-rw-r--r--  meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb      |  2
-rw-r--r--  meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc     | 16
-rw-r--r--  meta/classes-global/base.bbclass                                      | 33
-rw-r--r--  meta/classes-global/package.bbclass                                   | 12
-rw-r--r--  meta/classes/externalsrc.bbclass                                      |  4
-rw-r--r--  meta/conf/bitbake.conf                                                |  5
-rw-r--r--  meta/lib/oeqa/selftest/cases/fetch.py                                 |  2
8 files changed, 45 insertions(+), 47 deletions(-)
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
index 602e895199..5146129666 100644
--- a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline-fail.bb
@@ -1,16 +1,4 @@
-SUMMARY = "Test recipe for fetching git submodules"
-HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
-LICENSE = "GPL-2.0-or-later"
-LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+require gitunpackoffline.inc
-INHIBIT_DEFAULT_DEPS = "1"
-
-TAGVALUE = "2.10"
-
-# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
-SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
-SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
-SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
-SRCREV:gitunpack-enable-recipe = ""
-
-S = "${WORKDIR}/git"
+# Clear the base.bbclass magic srcrev call
+fetcher_hashes_dummyfunc[vardepvalue] = ""
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
index 597c89b199..b051b5da5a 100644
--- a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.bb
@@ -1,4 +1,4 @@
-require gitunpackoffline-fail.bb
+require gitunpackoffline.inc
TAGVALUE = "2.11"
diff --git a/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc
new file mode 100644
index 0000000000..602e895199
--- /dev/null
+++ b/meta-selftest/recipes-test/gitunpackoffline/gitunpackoffline.inc
@@ -0,0 +1,16 @@
+SUMMARY = "Test recipe for fetching git submodules"
+HOMEPAGE = "https://git.yoctoproject.org/git/matchbox-panel-2"
+LICENSE = "GPL-2.0-or-later"
+LIC_FILES_CHKSUM = "file://COPYING;md5=94d55d512a9ba36caa9b7df079bae19f"
+
+INHIBIT_DEFAULT_DEPS = "1"
+
+TAGVALUE = "2.10"
+
+# Deliberately have a tag which has to be resolved but ensure do_unpack doesn't access the network again.
+SRC_URI = "git://git.yoctoproject.org/git/matchbox-panel-2;branch=master;protocol=https"
+SRC_URI:append:gitunpack-enable-recipe = ";tag=${TAGVALUE}"
+SRCREV = "f82ca3f42510fb3ef10f598b393eb373a2c34ca7"
+SRCREV:gitunpack-enable-recipe = ""
+
+S = "${WORKDIR}/git"
diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass
index cbda8d12f0..7c774d250f 100644
--- a/meta/classes-global/base.bbclass
+++ b/meta/classes-global/base.bbclass
@@ -126,11 +126,18 @@ def setup_hosttools_dir(dest, toolsvar, d, fatal=True):
if notfound and fatal:
bb.fatal("The following required tools (as specified by HOSTTOOLS) appear to be unavailable in PATH, please install them in order to proceed:\n %s" % " ".join(notfound))
+# We can't use vardepvalue against do_fetch directly since that would overwrite
+# the other task dependencies so we use an indirect function.
+python fetcher_hashes_dummyfunc() {
+ return
+}
+fetcher_hashes_dummyfunc[vardepvalue] = "${@bb.fetch.get_hashvalue(d)}"
+
addtask fetch
do_fetch[dirs] = "${DL_DIR}"
do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}"
do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}"
-do_fetch[vardeps] += "SRCREV"
+do_fetch[prefuncs] += "fetcher_hashes_dummyfunc"
do_fetch[network] = "1"
python base_do_fetch() {
@@ -606,7 +613,6 @@ python () {
bb.debug(1, "Skipping recipe %s because of incompatible license(s): %s" % (pn, ' '.join(incompatible_lic)))
raise bb.parse.SkipRecipe("it has incompatible license(s): %s" % ' '.join(incompatible_lic))
- needsrcrev = False
srcuri = d.getVar('SRC_URI')
for uri_string in srcuri.split():
uri = bb.fetch.URI(uri_string)
@@ -619,24 +625,17 @@ python () {
# Svn packages should DEPEND on subversion-native
if uri.scheme == "svn":
- needsrcrev = True
d.appendVarFlag('do_fetch', 'depends', ' subversion-native:do_populate_sysroot')
# Git packages should DEPEND on git-native
elif uri.scheme in ("git", "gitsm"):
- needsrcrev = True
d.appendVarFlag('do_fetch', 'depends', ' git-native:do_populate_sysroot')
# Mercurial packages should DEPEND on mercurial-native
elif uri.scheme == "hg":
- needsrcrev = True
d.appendVar("EXTRANATIVEPATH", ' python3-native ')
d.appendVarFlag('do_fetch', 'depends', ' mercurial-native:do_populate_sysroot')
- # Perforce packages support SRCREV = "${AUTOREV}"
- elif uri.scheme == "p4":
- needsrcrev = True
-
# OSC packages should DEPEND on osc-native
elif uri.scheme == "osc":
d.appendVarFlag('do_fetch', 'depends', ' osc-native:do_populate_sysroot')
@@ -645,7 +644,6 @@ python () {
d.appendVarFlag('do_fetch', 'depends', ' nodejs-native:do_populate_sysroot')
elif uri.scheme == "repo":
- needsrcrev = True
d.appendVarFlag('do_fetch', 'depends', ' repo-native:do_populate_sysroot')
# *.lz4 should DEPEND on lz4-native for unpacking
@@ -676,21 +674,6 @@ python () {
elif path.endswith('.deb'):
d.appendVarFlag('do_unpack', 'depends', ' xz-native:do_populate_sysroot')
- if needsrcrev:
- d.setVar("SRCPV", "${@bb.fetch2.get_srcrev(d)}")
-
- # Gather all named SRCREVs to add to the sstate hash calculation
- # This anonymous python snippet is called multiple times so we
- # need to be careful to not double up the appends here and cause
- # the base hash to mismatch the task hash
- for uri in srcuri.split():
- parm = bb.fetch.decodeurl(uri)[5]
- uri_names = parm.get("name", "").split(",")
- for uri_name in filter(None, uri_names):
- srcrev_name = "SRCREV_{}".format(uri_name)
- if srcrev_name not in (d.getVarFlag("do_fetch", "vardeps") or "").split():
- d.appendVarFlag("do_fetch", "vardeps", " {}".format(srcrev_name))
-
set_packagetriplet(d)
# 'multimachine' handling
diff --git a/meta/classes-global/package.bbclass b/meta/classes-global/package.bbclass
index e8055a9cdc..0338a5c690 100644
--- a/meta/classes-global/package.bbclass
+++ b/meta/classes-global/package.bbclass
@@ -315,13 +315,21 @@ python package_get_auto_pr() {
# Package functions suitable for inclusion in PACKAGEFUNCS
#
-python package_convert_pr_autoinc() {
+python package_setup_pkgv() {
pkgv = d.getVar("PKGV")
+ # Expand SRCPV into PKGV if not present
+ srcpv = bb.fetch.get_pkgv_string(d)
+ if srcpv and "+" in pkgv:
+ d.appendVar("PKGV", srcpv)
+ pkgv = d.getVar("PKGV")
# Adjust pkgv as necessary...
if 'AUTOINC' in pkgv:
d.setVar("PKGV", pkgv.replace("AUTOINC", "${PRSERV_PV_AUTOINC}"))
+}
+
+python package_convert_pr_autoinc() {
# Change PRSERV_PV_AUTOINC and EXTENDPRAUTO usage to special values
d.setVar('PRSERV_PV_AUTOINC', '@PRSERV_PV_AUTOINC@')
d.setVar('EXTENDPRAUTO', '@EXTENDPRAUTO@')
@@ -494,6 +502,7 @@ python do_package () {
oe.qa.handle_error("var-undefined", msg, d)
return
+ bb.build.exec_func("package_setup_pkgv", d)
bb.build.exec_func("package_convert_pr_autoinc", d)
# Check for conflict between renamed packages and existing ones
@@ -577,6 +586,7 @@ addtask do_package_setscene
# Copy from PKGDESTWORK to tempdirectory as tempdirectory can be cleaned at both
# do_package_setscene and do_packagedata_setscene leading to races
python do_packagedata () {
+ bb.build.exec_func("package_setup_pkgv", d)
bb.build.exec_func("package_get_auto_pr", d)
src = d.expand("${PKGDESTWORK}")
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index aedd78a03a..322734f7ac 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -63,6 +63,7 @@ python () {
else:
d.setVar('B', '${WORKDIR}/${BPN}-${PV}')
+ bb.fetch.get_hashvalue(d)
local_srcuri = []
fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
for url in fetch.urls:
@@ -126,6 +127,9 @@ python () {
d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')
+ d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
+ d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')
+
# We don't want the workdir to go away
d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
diff --git a/meta/conf/bitbake.conf b/meta/conf/bitbake.conf
index 599bbc4ba8..c7ada2c49e 100644
--- a/meta/conf/bitbake.conf
+++ b/meta/conf/bitbake.conf
@@ -735,10 +735,7 @@ SRC_URI[vardepsexclude] += "\
SRCDATE = "${DATE}"
SRCREV ??= "INVALID"
AUTOREV = "${@bb.fetch2.get_autorev(d)}"
-AUTOREV[vardepvalue] = "${SRCPV}"
-# Set Dynamically in base.bbclass
-# SRCPV = "${@bb.fetch2.get_srcrev(d)}"
-SRCPV[vardepvalue] = "${SRCPV}"
+SRCPV = ""
SRC_URI = ""
diff --git a/meta/lib/oeqa/selftest/cases/fetch.py b/meta/lib/oeqa/selftest/cases/fetch.py
index c9107022c8..44099176fc 100644
--- a/meta/lib/oeqa/selftest/cases/fetch.py
+++ b/meta/lib/oeqa/selftest/cases/fetch.py
@@ -103,7 +103,7 @@ class Dependencies(OESelftestTestCase):
r = """
LICENSE="CLOSED"
- SRC_URI="git://example.com/repo;branch=master"
+ SRC_URI="git://example.com/repo;branch=master;rev=ffffffffffffffffffffffffffffffffffffffff"
"""
f = self.write_recipe(textwrap.dedent(r), tempdir)
d = tinfoil.parse_recipe_file(f)
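
A minimal recipe sketch (not part of this commit, URL and version made up for illustration) of how a floating-revision recipe is expected to look once SRCPV is empty and package_setup_pkgv() fills in the revision suffix at packaging time:

LICENSE = "CLOSED"
SRC_URI = "git://example.com/repo;branch=master;protocol=https"
SRCREV = "${AUTOREV}"
# PV no longer needs ${SRCPV}; because PKGV contains "+", package_setup_pkgv()
# appends the fetcher's revision string (bb.fetch.get_pkgv_string(d)) to PKGV.
PV = "1.0+git"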