Diffstat (limited to 'poky/bitbake/lib/bb/fetch2')
-rw-r--r--  poky/bitbake/lib/bb/fetch2/gitsm.py    | 45
-rw-r--r--  poky/bitbake/lib/bb/fetch2/perforce.py | 81
2 files changed, 116 insertions(+), 10 deletions(-)
diff --git a/poky/bitbake/lib/bb/fetch2/gitsm.py b/poky/bitbake/lib/bb/fetch2/gitsm.py
index 56bd5f0480..d6e5c5c050 100644
--- a/poky/bitbake/lib/bb/fetch2/gitsm.py
+++ b/poky/bitbake/lib/bb/fetch2/gitsm.py
@@ -143,12 +143,43 @@ class GitSM(Git):
try:
# Check for the nugget dropped by the download operation
known_srcrevs = runfetchcmd("%s config --get-all bitbake.srcrev" % \
- (ud.basecmd), d, workdir=ud.clonedir)
+ (ud.basecmd), d, workdir=ud.clonedir)
- if ud.revisions[ud.names[0]] not in known_srcrevs.split():
- return True
+ if ud.revisions[ud.names[0]] in known_srcrevs.split():
+ return False
except bb.fetch2.FetchError:
- # No srcrev nuggets, so this is new and needs to be updated
+ pass
+
+ need_update_list = []
+ def need_update_submodule(ud, url, module, modpath, workdir, d):
+ url += ";bareclone=1;nobranch=1"
+
+ try:
+ newfetch = Fetch([url], d, cache=False)
+ new_ud = newfetch.ud[url]
+ if new_ud.method.need_update(new_ud, d):
+ need_update_list.append(modpath)
+ except Exception as e:
+ logger.error('gitsm: submodule update check failed: %s %s' % (type(e).__name__, str(e)))
+ need_update_list.append(modpath)
+
+ # If we're using a shallow mirror tarball it needs to be unpacked
+ # temporarily so that we can examine the .gitmodules file
+ if ud.shallow and os.path.exists(ud.fullshallow) and not os.path.exists(ud.clonedir):
+ tmpdir = tempfile.mkdtemp(dir=d.getVar("DL_DIR"))
+ runfetchcmd("tar -xzf %s" % ud.fullshallow, d, workdir=tmpdir)
+ self.process_submodules(ud, tmpdir, need_update_submodule, d)
+ shutil.rmtree(tmpdir)
+ else:
+ self.process_submodules(ud, ud.clonedir, need_update_submodule, d)
+ if len(need_update_list) == 0:
+ # We already have the required commits of all submodules. Drop
+ # a nugget so we don't need to check again.
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
+
+ if len(need_update_list) > 0:
+ logger.debug(1, 'gitsm: Submodules requiring update: %s' % (' '.join(need_update_list)))
return True
return False
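
The hunk above reworks need_update() around the "nugget" bookkeeping: the download step records each fully fetched SRCREV under the git config key bitbake.srcrev in the superproject's bare clone, and need_update() can then short-circuit to False when the requested revision is already listed, skipping the per-submodule checks. A minimal standalone sketch of that bookkeeping, using plain subprocess instead of the fetcher's runfetchcmd helper (clonedir and srcrev are hypothetical, illustration only):

    import subprocess

    def srcrev_already_fetched(clonedir, srcrev):
        # 'git config --get-all' exits non-zero when no bitbake.srcrev entries exist yet
        result = subprocess.run(["git", "config", "--get-all", "bitbake.srcrev"],
                                cwd=clonedir, capture_output=True, text=True)
        return result.returncode == 0 and srcrev in result.stdout.split()

    def record_srcrev(clonedir, srcrev):
        # Drop the nugget once every submodule commit is known to be present locally
        subprocess.run(["git", "config", "--add", "bitbake.srcrev", srcrev],
                       cwd=clonedir, check=True)

As in the patch, the nugget is only written after all submodules have been checked or fetched successfully, so an interrupted fetch keeps need_update() returning True on the next run.
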
@@ -163,9 +194,6 @@ class GitSM(Git):
try:
newfetch = Fetch([url], d, cache=False)
newfetch.download()
- # Drop a nugget to add each of the srcrevs we've fetched (used by need_update)
- runfetchcmd("%s config --add bitbake.srcrev %s" % \
- (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=workdir)
except Exception as e:
logger.error('gitsm: submodule download failed: %s %s' % (type(e).__name__, str(e)))
raise
@@ -181,6 +209,9 @@ class GitSM(Git):
shutil.rmtree(tmpdir)
else:
self.process_submodules(ud, ud.clonedir, download_submodule, d)
+ # Drop a nugget for the srcrev we've fetched (used by need_update)
+ runfetchcmd("%s config --add bitbake.srcrev %s" % \
+ (ud.basecmd, ud.revisions[ud.names[0]]), d, workdir=ud.clonedir)
def unpack(self, ud, destdir, d):
def unpack_submodules(ud, url, module, modpath, workdir, d):
diff --git a/poky/bitbake/lib/bb/fetch2/perforce.py b/poky/bitbake/lib/bb/fetch2/perforce.py
index f57c2a4f52..6f3c95b6ce 100644
--- a/poky/bitbake/lib/bb/fetch2/perforce.py
+++ b/poky/bitbake/lib/bb/fetch2/perforce.py
@@ -1,6 +1,20 @@
"""
BitBake 'Fetch' implementation for perforce
+Supported SRC_URI options are:
+
+- module
+ The top-level location to fetch while preserving the remote paths
+
+ The value of module can point to either a directory or a file. The result,
+ in both cases, is that the fetcher will preserve all file paths starting
+ from the module path. That is, the top-level directory in the module value
+ will also be the top-level directory in P4DIR.
+
+- remotepath
+ If the value "keep" is given, the full depot location of each file is
+ preserved in P4DIR. This option overrides the effect of the module option.
+
"""
# Copyright (C) 2003, 2004 Chris Larson
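
As an illustration of the options documented above, hypothetical SRC_URI values (the host and depot paths are invented) might look like the following; these are assumptions for illustration, not taken from the patch:

    # Classic form, unchanged behaviour:
    SRC_URI = "p4://p4.example.com:1666/depot/project/..."

    # module: fetch only lib/utils, preserving paths from lib/utils downwards in P4DIR:
    SRC_URI = "p4://p4.example.com:1666/depot/project;module=lib/utils/..."

    # remotepath=keep: preserve the full depot path of every file in P4DIR:
    SRC_URI = "p4://p4.example.com:1666/depot/project/...;remotepath=keep"
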
@@ -17,6 +31,36 @@ from bb.fetch2 import FetchError
from bb.fetch2 import logger
from bb.fetch2 import runfetchcmd
+class PerforceProgressHandler (bb.progress.BasicProgressHandler):
+ """
+ Implements basic progress information for perforce, based on the number of
+ files to be downloaded.
+
+ The p4 print command prints one line per file, so it can be used to "count"
+ the number of files already completed and give an indication of the
+ progress.
+ """
+ def __init__(self, d, num_files):
+ self._num_files = num_files
+ self._count = 0
+ super(PerforceProgressHandler, self).__init__(d)
+
+ # Send an initial progress event so the bar gets shown
+ self._fire_progress(-1)
+
+ def write(self, string):
+ self._count = self._count + 1
+
+ percent = int(100.0 * float(self._count) / float(self._num_files))
+
+ # In case something goes wrong, we try to preserve our sanity
+ if percent > 100:
+ percent = 100
+
+ self.update(percent)
+
+ super(PerforceProgressHandler, self).write(string)
+
class Perforce(FetchMethod):
""" Class to fetch from perforce repositories """
def supports(self, ud, d):
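
The same counting idea, stripped of the BitBake plumbing, can be sketched as a self-contained Python snippet (the file list is made up; the real handler reports through bb.progress events rather than print):

    class LineCountProgress:
        """Report percent done from the number of output lines seen so far."""
        def __init__(self, num_files):
            self.num_files = num_files
            self.count = 0

        def write(self, line):
            self.count += 1
            # Clamp at 100 in case more lines arrive than files were expected
            percent = min(100, int(100.0 * self.count / self.num_files))
            print("%3d%%  %s" % (percent, line.strip()))

    progress = LineCountProgress(num_files=4)
    for line in ["//depot/a.c#1", "//depot/b.c#3", "//depot/c.h#2", "//depot/d.c#1"]:
        progress.write(line)   # prints 25%, 50%, 75%, 100%
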
@@ -58,14 +102,33 @@ class Perforce(FetchMethod):
logger.debug(1, 'Determined P4PORT to be: %s' % ud.host)
if not ud.host:
raise FetchError('Could not determine P4PORT from P4CONFIG')
-
+
+ # Fetcher options
+ ud.module = ud.parm.get('module')
+ ud.keepremotepath = (ud.parm.get('remotepath', '') == 'keep')
+
if ud.path.find('/...') >= 0:
ud.pathisdir = True
else:
ud.pathisdir = False
+ # Avoid using the "/..." syntax in SRC_URI when a module value is given
+ if ud.pathisdir and ud.module:
+ raise FetchError('SRC_URI depot path cannot end in /... when a module value is given')
+
cleanedpath = ud.path.replace('/...', '').replace('/', '.')
cleanedhost = ud.host.replace(':', '.')
+
+ # Merge the path and module into the final depot location
+ if ud.module:
+ if ud.module.find('/') == 0:
+ raise FetchError('module cannot begin with /')
+ ud.path = os.path.join(ud.path, ud.module)
+
+ # Append the module path to the local pkg name
+ cleanedmodule = ud.module.replace('/...', '').replace('/', '.')
+ cleanedpath += '--%s' % cleanedmodule
+
ud.pkgdir = os.path.join(ud.dldir, cleanedhost, cleanedpath)
ud.setup_revisions(d)
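
As a worked example of the path handling above (the host, depot path, module value and download directory are made up), the string operations reduce to:

    import os

    host, path, module = 'p4.example.com:1666', '/depot/project', 'lib/utils/...'
    dldir = '/downloads'                                         # stands in for ud.dldir

    cleanedpath = path.replace('/...', '').replace('/', '.')     # '.depot.project'
    cleanedhost = host.replace(':', '.')                         # 'p4.example.com.1666'

    path = os.path.join(path, module)                            # '/depot/project/lib/utils/...'
    cleanedpath += '--%s' % module.replace('/...', '').replace('/', '.')   # '.depot.project--lib.utils'

    pkgdir = os.path.join(dldir, cleanedhost, cleanedpath)
    # '/downloads/p4.example.com.1666/.depot.project--lib.utils'
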
@@ -95,10 +158,20 @@ class Perforce(FetchMethod):
pathnrev = '%s' % (ud.path)
if depot_filename:
- if ud.pathisdir: # Remove leading path to obtain filename
+ if ud.keepremotepath:
+ # preserve everything, remove the leading //
+ filename = depot_filename.lstrip('/')
+ elif ud.module:
+ # remove everything up to the module path
+ modulepath = ud.module.rstrip('/...')
+ filename = depot_filename[depot_filename.rfind(modulepath):]
+ elif ud.pathisdir:
+ # Remove leading (visible) path to obtain the filepath
filename = depot_filename[len(ud.path)-1:]
else:
+ # Remove everything, except the filename
filename = depot_filename[depot_filename.rfind('/'):]
+
filename = filename[:filename.find('#')] # Remove trailing '#rev'
if command == 'changes':
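
A worked example of the two new filename branches (the depot file is made up), following the slicing in the hunk above:

    depot_filename = '//depot/project/lib/utils/foo.c#3'

    # remotepath=keep: keep the whole depot path, minus the leading '//'
    keep = depot_filename.lstrip('/')
    keep = keep[:keep.find('#')]                     # 'depot/project/lib/utils/foo.c'

    # module=lib/utils/...: keep everything from the module path onwards
    modulepath = 'lib/utils/...'.rstrip('/...')      # 'lib/utils'
    mod = depot_filename[depot_filename.rfind(modulepath):]
    mod = mod[:mod.find('#')]                        # 'lib/utils/foo.c'
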
@@ -150,10 +223,12 @@ class Perforce(FetchMethod):
bb.utils.remove(ud.pkgdir, True)
bb.utils.mkdirhier(ud.pkgdir)
+ progresshandler = PerforceProgressHandler(d, len(filelist))
+
for afile in filelist:
p4fetchcmd = self._buildp4command(ud, d, 'print', afile)
bb.fetch2.check_network_access(d, p4fetchcmd, ud.url)
- runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir)
+ runfetchcmd(p4fetchcmd, d, workdir=ud.pkgdir, log=progresshandler)
runfetchcmd('tar -czf %s p4' % (ud.localpath), d, cleanup=[ud.localpath], workdir=ud.pkgdir)